190075Sobrien/* Control flow optimization code for GNU compiler.
290075Sobrien   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3169689Skan   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
490075Sobrien
590075SobrienThis file is part of GCC.
690075Sobrien
790075SobrienGCC is free software; you can redistribute it and/or modify it under
890075Sobrienthe terms of the GNU General Public License as published by the Free
990075SobrienSoftware Foundation; either version 2, or (at your option) any later
1090075Sobrienversion.
1190075Sobrien
1290075SobrienGCC is distributed in the hope that it will be useful, but WITHOUT ANY
1390075SobrienWARRANTY; without even the implied warranty of MERCHANTABILITY or
1490075SobrienFITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1590075Sobrienfor more details.
1690075Sobrien
1790075SobrienYou should have received a copy of the GNU General Public License
1890075Sobrienalong with GCC; see the file COPYING.  If not, write to the Free
19169689SkanSoftware Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20169689Skan02110-1301, USA.  */
2190075Sobrien
22169689Skan/* This file contains optimizer of the control flow.  The main entry point is
2390075Sobrien   cleanup_cfg.  Following optimizations are performed:
2490075Sobrien
2590075Sobrien   - Unreachable blocks removal
26169689Skan   - Edge forwarding (edge to the forwarder block is forwarded to its
2790075Sobrien     successor.  Simplification of the branch instruction is performed by
2890075Sobrien     underlying infrastructure so branch can be converted to simplejump or
2990075Sobrien     eliminated).
3090075Sobrien   - Cross jumping (tail merging)
3190075Sobrien   - Conditional jump-around-simplejump simplification
3290075Sobrien   - Basic block merging.  */
3390075Sobrien
3490075Sobrien#include "config.h"
3590075Sobrien#include "system.h"
36132718Skan#include "coretypes.h"
37132718Skan#include "tm.h"
3890075Sobrien#include "rtl.h"
3990075Sobrien#include "hard-reg-set.h"
40169689Skan#include "regs.h"
4190075Sobrien#include "timevar.h"
4290075Sobrien#include "output.h"
4390075Sobrien#include "insn-config.h"
4490075Sobrien#include "flags.h"
4590075Sobrien#include "recog.h"
4690075Sobrien#include "toplev.h"
4790075Sobrien#include "cselib.h"
48117395Skan#include "params.h"
4990075Sobrien#include "tm_p.h"
5096263Sobrien#include "target.h"
51169689Skan#include "cfglayout.h"
52169689Skan#include "emit-rtl.h"
53169689Skan#include "tree-pass.h"
54169689Skan#include "cfgloop.h"
55132718Skan#include "expr.h"
5690075Sobrien
57169689Skan#define FORWARDER_BLOCK_P(BB) ((BB)->flags & BB_FORWARDER_BLOCK)
5890075Sobrien
59169689Skan/* Set to true when we are running first pass of try_optimize_cfg loop.  */
60169689Skanstatic bool first_pass;
61132718Skanstatic bool try_crossjump_to_edge (int, edge, edge);
62132718Skanstatic bool try_crossjump_bb (int, basic_block);
63132718Skanstatic bool outgoing_edges_match (int, basic_block, basic_block);
64132718Skanstatic int flow_find_cross_jump (int, basic_block, basic_block, rtx *, rtx *);
65169689Skanstatic bool old_insns_match_p (int, rtx, rtx);
6690075Sobrien
67132718Skanstatic void merge_blocks_move_predecessor_nojumps (basic_block, basic_block);
68132718Skanstatic void merge_blocks_move_successor_nojumps (basic_block, basic_block);
69132718Skanstatic bool try_optimize_cfg (int);
70132718Skanstatic bool try_simplify_condjump (basic_block);
71132718Skanstatic bool try_forward_edges (int, basic_block);
72132718Skanstatic edge thread_jump (int, edge, basic_block);
73132718Skanstatic bool mark_effect (rtx, bitmap);
74132718Skanstatic void notice_new_block (basic_block);
75132718Skanstatic void update_forwarder_flag (basic_block);
76132718Skanstatic int mentions_nonequal_regs (rtx *, void *);
77132718Skanstatic void merge_memattrs (rtx, rtx);
7890075Sobrien
7990075Sobrien/* Set flags for newly created block.  */
8090075Sobrien
8190075Sobrienstatic void
82132718Skannotice_new_block (basic_block bb)
8390075Sobrien{
8490075Sobrien  if (!bb)
8590075Sobrien    return;
8690075Sobrien
8790075Sobrien  if (forwarder_block_p (bb))
88169689Skan    bb->flags |= BB_FORWARDER_BLOCK;
8990075Sobrien}
9090075Sobrien
9190075Sobrien/* Recompute forwarder flag after block has been modified.  */
9290075Sobrien
9390075Sobrienstatic void
94132718Skanupdate_forwarder_flag (basic_block bb)
9590075Sobrien{
9690075Sobrien  if (forwarder_block_p (bb))
97169689Skan    bb->flags |= BB_FORWARDER_BLOCK;
9890075Sobrien  else
99169689Skan    bb->flags &= ~BB_FORWARDER_BLOCK;
10090075Sobrien}
10190075Sobrien
/* Simplify a conditional jump around an unconditional jump.

   CBRANCH_BLOCK ends with a conditional branch whose fallthru edge leads
   to a forwarder block containing only an unconditional jump.  Invert the
   condition so the conditional branch falls through to the unconditional
   jump's destination, and delete the forwarder block.
   Return true if something changed.  */

static bool
try_simplify_condjump (basic_block cbranch_block)
{
  basic_block jump_block, jump_dest_block, cbranch_dest_block;
  edge cbranch_jump_edge, cbranch_fallthru_edge;
  rtx cbranch_insn;

  /* Verify that there are exactly two successors.  */
  if (EDGE_COUNT (cbranch_block->succs) != 2)
    return false;

  /* Verify that we've got a normal conditional branch at the end
     of the block.  */
  cbranch_insn = BB_END (cbranch_block);
  if (!any_condjump_p (cbranch_insn))
    return false;

  cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
  cbranch_jump_edge = BRANCH_EDGE (cbranch_block);

  /* The next block must not have multiple predecessors, must not
     be the last block in the function, and must contain just the
     unconditional jump.  */
  jump_block = cbranch_fallthru_edge->dest;
  if (!single_pred_p (jump_block)
      || jump_block->next_bb == EXIT_BLOCK_PTR
      || !FORWARDER_BLOCK_P (jump_block))
    return false;
  jump_dest_block = single_succ (jump_block);

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (BB_PARTITION (jump_block) != BB_PARTITION (jump_dest_block)
      || (cbranch_jump_edge->flags & EDGE_CROSSING))
    return false;

  /* The conditional branch must target the block after the
     unconditional branch.  */
  cbranch_dest_block = cbranch_jump_edge->dest;

  /* The (inverted) branch will fall through to its old jump target, so
     that target must be reachable by fallthru from JUMP_BLOCK's slot.  */
  if (cbranch_dest_block == EXIT_BLOCK_PTR
      || !can_fallthru (jump_block, cbranch_dest_block))
    return false;

  /* Invert the conditional branch.  If inversion is not possible for
     this comparison, give up without having changed anything.  */
  if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 0))
    return false;

  if (dump_file)
    fprintf (dump_file, "Simplifying condjump %i around jump %i\n",
	     INSN_UID (cbranch_insn), INSN_UID (BB_END (jump_block)));

  /* Success.  Update the CFG to match.  Note that after this point
     the edge variable names appear backwards; the redirection is done
     this way to preserve edge profile data.  */
  cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
						cbranch_dest_block);
  cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
						    jump_dest_block);
  cbranch_jump_edge->flags |= EDGE_FALLTHRU;
  cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  update_br_prob_note (cbranch_block);

  /* Delete the block with the unconditional jump, and clean up the mess.  */
  delete_basic_block (jump_block);
  tidy_fallthru_edge (cbranch_jump_edge);
  /* CBRANCH_BLOCK's contents changed; its forwarder status may have too.  */
  update_forwarder_flag (cbranch_block);

  return true;
}
18390075Sobrien
18490075Sobrien/* Attempt to prove that operation is NOOP using CSElib or mark the effect
18590075Sobrien   on register.  Used by jump threading.  */
18690075Sobrien
18790075Sobrienstatic bool
188132718Skanmark_effect (rtx exp, regset nonequal)
18990075Sobrien{
19090075Sobrien  int regno;
19190075Sobrien  rtx dest;
19290075Sobrien  switch (GET_CODE (exp))
19390075Sobrien    {
19490075Sobrien      /* In case we do clobber the register, mark it as equal, as we know the
195169689Skan	 value is dead so it don't have to match.  */
196117395Skan    case CLOBBER:
197117395Skan      if (REG_P (XEXP (exp, 0)))
198117395Skan	{
199117395Skan	  dest = XEXP (exp, 0);
200117395Skan	  regno = REGNO (dest);
201117395Skan	  CLEAR_REGNO_REG_SET (nonequal, regno);
202117395Skan	  if (regno < FIRST_PSEUDO_REGISTER)
203117395Skan	    {
204169689Skan	      int n = hard_regno_nregs[regno][GET_MODE (dest)];
205117395Skan	      while (--n > 0)
206117395Skan		CLEAR_REGNO_REG_SET (nonequal, regno + n);
207117395Skan	    }
208117395Skan	}
209117395Skan      return false;
21090075Sobrien
211117395Skan    case SET:
212117395Skan      if (rtx_equal_for_cselib_p (SET_DEST (exp), SET_SRC (exp)))
21390075Sobrien	return false;
214117395Skan      dest = SET_DEST (exp);
215117395Skan      if (dest == pc_rtx)
216117395Skan	return false;
217117395Skan      if (!REG_P (dest))
218117395Skan	return true;
219117395Skan      regno = REGNO (dest);
220117395Skan      SET_REGNO_REG_SET (nonequal, regno);
221117395Skan      if (regno < FIRST_PSEUDO_REGISTER)
222117395Skan	{
223169689Skan	  int n = hard_regno_nregs[regno][GET_MODE (dest)];
224117395Skan	  while (--n > 0)
225117395Skan	    SET_REGNO_REG_SET (nonequal, regno + n);
226117395Skan	}
227117395Skan      return false;
22890075Sobrien
229117395Skan    default:
230117395Skan      return false;
23190075Sobrien    }
23290075Sobrien}
233117395Skan
234132718Skan/* Return nonzero if X is a register set in regset DATA.
235117395Skan   Called via for_each_rtx.  */
236117395Skanstatic int
237132718Skanmentions_nonequal_regs (rtx *x, void *data)
238117395Skan{
239117395Skan  regset nonequal = (regset) data;
240117395Skan  if (REG_P (*x))
241117395Skan    {
242117395Skan      int regno;
243117395Skan
244117395Skan      regno = REGNO (*x);
245117395Skan      if (REGNO_REG_SET_P (nonequal, regno))
246117395Skan	return 1;
247117395Skan      if (regno < FIRST_PSEUDO_REGISTER)
248117395Skan	{
249169689Skan	  int n = hard_regno_nregs[regno][GET_MODE (*x)];
250117395Skan	  while (--n > 0)
251117395Skan	    if (REGNO_REG_SET_P (nonequal, regno + n))
252117395Skan	      return 1;
253117395Skan	}
254117395Skan    }
255117395Skan  return 0;
256117395Skan}
/* Attempt to prove that the basic block B will have no side effects and
   always continues in the same edge if reached via E.  Return the edge
   if exist, NULL otherwise.

   MODE is the set of CLEANUP_* flags in effect.  Blocks proven
   unthreadable are marked BB_NONTHREADABLE_BLOCK so repeated queries
   bail out early.  */

static edge
thread_jump (int mode, edge e, basic_block b)
{
  rtx set1, set2, cond1, cond2, insn;
  enum rtx_code code1, code2, reversed_code2;
  bool reverse1 = false;
  unsigned i;
  regset nonequal;
  bool failed = false;
  reg_set_iterator rsi;

  /* Fail fast if a previous attempt already proved B unthreadable.  */
  if (b->flags & BB_NONTHREADABLE_BLOCK)
    return NULL;

  /* At the moment, we do handle only conditional jumps, but later we may
     want to extend this code to tablejumps and others.  */
  if (EDGE_COUNT (e->src->succs) != 2)
    return NULL;
  if (EDGE_COUNT (b->succs) != 2)
    {
      b->flags |= BB_NONTHREADABLE_BLOCK;
      return NULL;
    }

  /* Second branch must end with onlyjump, as we will eliminate the jump.  */
  if (!any_condjump_p (BB_END (e->src)))
    return NULL;

  if (!any_condjump_p (BB_END (b)) || !onlyjump_p (BB_END (b)))
    {
      b->flags |= BB_NONTHREADABLE_BLOCK;
      return NULL;
    }

  /* Extract the two PC-setting patterns and normalize the first branch's
     sense: REVERSE1 is set when taking E corresponds to the branch (not
     the fallthru) arm of the condition in E->src.  */
  set1 = pc_set (BB_END (e->src));
  set2 = pc_set (BB_END (b));
  if (((e->flags & EDGE_FALLTHRU) != 0)
      != (XEXP (SET_SRC (set1), 1) == pc_rtx))
    reverse1 = true;

  cond1 = XEXP (SET_SRC (set1), 0);
  cond2 = XEXP (SET_SRC (set2), 0);
  if (reverse1)
    code1 = reversed_comparison_code (cond1, BB_END (e->src));
  else
    code1 = GET_CODE (cond1);

  code2 = GET_CODE (cond2);
  reversed_code2 = reversed_comparison_code (cond2, BB_END (b));

  /* The first condition must decide the second one (in either sense),
     otherwise B's branch direction is not determined by taking E.  */
  if (!comparison_dominates_p (code1, code2)
      && !comparison_dominates_p (code1, reversed_code2))
    return NULL;

  /* Ensure that the comparison operators are equivalent.
     ??? This is far too pessimistic.  We should allow swapped operands,
     different CCmodes, or for example comparisons for interval, that
     dominate even when operands are not equivalent.  */
  if (!rtx_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
      || !rtx_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
    return NULL;

  /* Short circuit cases where block B contains some side effects, as we can't
     safely bypass it.  */
  for (insn = NEXT_INSN (BB_HEAD (b)); insn != NEXT_INSN (BB_END (b));
       insn = NEXT_INSN (insn))
    if (INSN_P (insn) && side_effects_p (PATTERN (insn)))
      {
	b->flags |= BB_NONTHREADABLE_BLOCK;
	return NULL;
      }

  cselib_init (false);

  /* First process all values computed in the source basic block.  */
  for (insn = NEXT_INSN (BB_HEAD (e->src));
       insn != NEXT_INSN (BB_END (e->src));
       insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      cselib_process_insn (insn);

  /* NONEQUAL collects registers whose value after B may differ from
     their value at the end of E->src.  */
  nonequal = BITMAP_ALLOC (NULL);
  CLEAR_REG_SET (nonequal);

  /* Now assume that we've continued by the edge E to B and continue
     processing as if it were same basic block.
     Our goal is to prove that whole block is an NOOP.  */

  for (insn = NEXT_INSN (BB_HEAD (b));
       insn != NEXT_INSN (BB_END (b)) && !failed;
       insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      for (i = 0; i < (unsigned)XVECLEN (pat, 0); i++)
		failed |= mark_effect (XVECEXP (pat, 0, i), nonequal);
	    }
	  else
	    failed |= mark_effect (pat, nonequal);
	}

      cselib_process_insn (insn);
    }

  /* Later we should clear nonequal of dead registers.  So far we don't
     have life information in cfg_cleanup.  */
  if (failed)
    {
      b->flags |= BB_NONTHREADABLE_BLOCK;
      goto failed_exit;
    }

  /* cond2 must not mention any register that is not equal to the
     former block.  */
  if (for_each_rtx (&cond2, mentions_nonequal_regs, nonequal))
    goto failed_exit;

  /* In case liveness information is available, we need to prove equivalence
     only of the live values.  */
  if (mode & CLEANUP_UPDATE_LIFE)
    AND_REG_SET (nonequal, b->il.rtl->global_live_at_end);

  /* Any remaining bit means a (live) register with unproven value;
     give up if there is one.  */
  EXECUTE_IF_SET_IN_REG_SET (nonequal, 0, i, rsi)
    goto failed_exit;

  BITMAP_FREE (nonequal);
  cselib_finish ();
  /* B is a NOOP: pick the arm of B's branch that the (dominating)
     condition of E->src forces.  */
  if ((comparison_dominates_p (code1, code2) != 0)
      != (XEXP (SET_SRC (set2), 1) == pc_rtx))
    return BRANCH_EDGE (b);
  else
    return FALLTHRU_EDGE (b);

failed_exit:
  BITMAP_FREE (nonequal);
  cselib_finish ();
  return NULL;
}
40390075Sobrien
/* Attempt to forward edges leaving basic block B, i.e. redirect each
   outgoing edge past chains of forwarder blocks (and, when
   CLEANUP_THREADING is set in MODE, past blocks proven to be NOOPs by
   thread_jump) directly to the final destination, updating profile data
   along the traversed chain.  Return true if successful.  */

static bool
try_forward_edges (int mode, basic_block b)
{
  bool changed = false;
  edge_iterator ei;
  edge e, *threaded_edges = NULL;

  /* If we are partitioning hot/cold basic blocks, we don't want to
     mess up unconditional or indirect jumps that cross between hot
     and cold sections.

     Basic block partitioning may result in some jumps that appear to
     be optimizable (or blocks that appear to be mergeable), but which really
     must be left untouched (they are required to make it safely across
     partition boundaries).  See the comments at the top of
     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */

  if (find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX))
    return false;

  /* NOTE: ei_next is called explicitly only on the paths that did not
     remove or redirect E; successful forwarding re-examines the same
     iterator slot via `continue'.  */
  for (ei = ei_start (b->succs); (e = ei_safe_edge (ei)); )
    {
      basic_block target, first;
      int counter;
      bool threaded = false;
      int nthreaded_edges = 0;
      bool may_thread = first_pass | (b->flags & BB_DIRTY);

      /* Skip complex edges because we don't know how to update them.

	 Still handle fallthru edges, as we can succeed to forward fallthru
	 edge to the same place as the branch edge of conditional branch
	 and turn conditional branch to an unconditional branch.  */
      if (e->flags & EDGE_COMPLEX)
	{
	  ei_next (&ei);
	  continue;
	}

      target = first = e->dest;
      counter = NUM_FIXED_BLOCKS;

      /* If we are partitioning hot/cold basic_blocks, we don't want to mess
	 up jumps that cross between hot/cold sections.

	 Basic block partitioning may result in some jumps that appear
	 to be optimizable (or blocks that appear to be mergeable), but which
	 really must be left untouched (they are required to make it safely
	 across partition boundaries).  See the comments at the top of
	 bb-reorder.c:partition_hot_cold_basic_blocks for complete
	 details.  */

      /* NOTE(review): this early return skips the free (threaded_edges)
	 at the end of the function and discards CHANGED from earlier
	 iterations — looks like a leak; confirm against later GCC.  */
      if (first != EXIT_BLOCK_PTR
	  && find_reg_note (BB_END (first), REG_CROSSING_JUMP, NULL_RTX))
	return false;

      /* Walk the chain of forwarder/threadable blocks; COUNTER bounds the
	 walk so a cycle terminates after at most n_basic_blocks steps.  */
      while (counter < n_basic_blocks)
	{
	  basic_block new_target = NULL;
	  bool new_target_threaded = false;
	  may_thread |= target->flags & BB_DIRTY;

	  if (FORWARDER_BLOCK_P (target)
	      && !(single_succ_edge (target)->flags & EDGE_CROSSING)
	      && single_succ (target) != EXIT_BLOCK_PTR)
	    {
	      /* Bypass trivial infinite loops.  */
	      new_target = single_succ (target);
	      if (target == new_target)
		counter = n_basic_blocks;
	    }

	  /* Allow to thread only over one edge at time to simplify updating
	     of probabilities.  */
	  else if ((mode & CLEANUP_THREADING) && may_thread)
	    {
	      edge t = thread_jump (mode, e, target);
	      if (t)
		{
		  if (!threaded_edges)
		    threaded_edges = XNEWVEC (edge, n_basic_blocks);
		  else
		    {
		      int i;

		      /* Detect an infinite loop across blocks not
			 including the start block.  */
		      for (i = 0; i < nthreaded_edges; ++i)
			if (threaded_edges[i] == t)
			  break;
		      if (i < nthreaded_edges)
			{
			  counter = n_basic_blocks;
			  break;
			}
		    }

		  /* Detect an infinite loop across the start block.  */
		  if (t->dest == b)
		    break;

		  gcc_assert (nthreaded_edges < n_basic_blocks - NUM_FIXED_BLOCKS);
		  threaded_edges[nthreaded_edges++] = t;

		  new_target = t->dest;
		  new_target_threaded = true;
		}
	    }

	  if (!new_target)
	    break;

	  counter++;
	  target = new_target;
	  threaded |= new_target_threaded;
	}

      if (counter >= n_basic_blocks)
	{
	  if (dump_file)
	    fprintf (dump_file, "Infinite loop in BB %i.\n",
		     target->index);
	}
      else if (target == first)
	; /* We didn't do anything.  */
      else
	{
	  /* Save the values now, as the edge may get removed.  */
	  gcov_type edge_count = e->count;
	  int edge_probability = e->probability;
	  int edge_frequency;
	  int n = 0;

	  /* Don't force if target is exit block.  */
	  if (threaded && target != EXIT_BLOCK_PTR)
	    {
	      notice_new_block (redirect_edge_and_branch_force (e, target));
	      if (dump_file)
		fprintf (dump_file, "Conditionals threaded.\n");
	    }
	  else if (!redirect_edge_and_branch (e, target))
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Forwarding edge %i->%i to %i failed.\n",
			 b->index, e->dest->index, target->index);
	      ei_next (&ei);
	      continue;
	    }

	  /* We successfully forwarded the edge.  Now update profile
	     data: for each edge we traversed in the chain, remove
	     the original edge's execution count.  */
	  edge_frequency = ((edge_probability * b->frequency
			     + REG_BR_PROB_BASE / 2)
			    / REG_BR_PROB_BASE);

	  if (!FORWARDER_BLOCK_P (b) && forwarder_block_p (b))
	    b->flags |= BB_FORWARDER_BLOCK;

	  /* Walk the chain again, from FIRST to TARGET, subtracting the
	     redirected edge's count/frequency from every block and edge
	     originally on the path.  */
	  do
	    {
	      edge t;

	      if (!single_succ_p (first))
		{
		  /* A threaded block has two successors; use the recorded
		     edge and let the helper rescale branch probabilities.  */
		  gcc_assert (n < nthreaded_edges);
		  t = threaded_edges [n++];
		  gcc_assert (t->src == first);
		  update_bb_profile_for_threading (first, edge_frequency,
						   edge_count, t);
		  update_br_prob_note (first);
		}
	      else
		{
		  /* Clamp at zero so profile counts never go negative.  */
		  first->count -= edge_count;
		  if (first->count < 0)
		    first->count = 0;
		  first->frequency -= edge_frequency;
		  if (first->frequency < 0)
		    first->frequency = 0;
		  /* It is possible that as the result of
		     threading we've removed edge as it is
		     threaded to the fallthru edge.  Avoid
		     getting out of sync.  */
		  if (n < nthreaded_edges
		      && first == threaded_edges [n]->src)
		    n++;
		  t = single_succ_edge (first);
		}

	      t->count -= edge_count;
	      if (t->count < 0)
		t->count = 0;
	      first = t->dest;
	    }
	  while (first != target);

	  changed = true;
	  continue;
	}
      ei_next (&ei);
    }

  if (threaded_edges)
    free (threaded_edges);
  return changed;
}
61590075Sobrien
61690075Sobrien
61790075Sobrien/* Blocks A and B are to be merged into a single block.  A has no incoming
61890075Sobrien   fallthru edge, so it can be moved before B without adding or modifying
61990075Sobrien   any jumps (aside from the jump from A to B).  */
62090075Sobrien
62190075Sobrienstatic void
622132718Skanmerge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
62390075Sobrien{
62490075Sobrien  rtx barrier;
625169689Skan  bool only_notes;
62690075Sobrien
627169689Skan  /* If we are partitioning hot/cold basic blocks, we don't want to
628169689Skan     mess up unconditional or indirect jumps that cross between hot
629169689Skan     and cold sections.
630169689Skan
631169689Skan     Basic block partitioning may result in some jumps that appear to
632169689Skan     be optimizable (or blocks that appear to be mergeable), but which really
633169689Skan     must be left untouched (they are required to make it safely across
634169689Skan     partition boundaries).  See the comments at the top of
635169689Skan     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
636169689Skan
637169689Skan  if (BB_PARTITION (a) != BB_PARTITION (b))
638169689Skan    return;
639169689Skan
640132718Skan  barrier = next_nonnote_insn (BB_END (a));
641169689Skan  gcc_assert (BARRIER_P (barrier));
64290075Sobrien  delete_insn (barrier);
64390075Sobrien
64490075Sobrien  /* Move block and loop notes out of the chain so that we do not
64590075Sobrien     disturb their order.
64690075Sobrien
64790075Sobrien     ??? A better solution would be to squeeze out all the non-nested notes
64890075Sobrien     and adjust the block trees appropriately.   Even better would be to have
64990075Sobrien     a tighter connection between block trees and rtl so that this is not
65090075Sobrien     necessary.  */
651169689Skan  only_notes = squeeze_notes (&BB_HEAD (a), &BB_END (a));
652169689Skan  gcc_assert (!only_notes);
65390075Sobrien
65490075Sobrien  /* Scramble the insn chain.  */
655132718Skan  if (BB_END (a) != PREV_INSN (BB_HEAD (b)))
656132718Skan    reorder_insns_nobb (BB_HEAD (a), BB_END (a), PREV_INSN (BB_HEAD (b)));
657117395Skan  a->flags |= BB_DIRTY;
65890075Sobrien
659169689Skan  if (dump_file)
660169689Skan    fprintf (dump_file, "Moved block %d before %d and merged.\n",
66190075Sobrien	     a->index, b->index);
66290075Sobrien
663117395Skan  /* Swap the records for the two blocks around.  */
66490075Sobrien
665117395Skan  unlink_block (a);
666117395Skan  link_block (a, b->prev_bb);
667117395Skan
66890075Sobrien  /* Now blocks A and B are contiguous.  Merge them.  */
669132718Skan  merge_blocks (a, b);
67090075Sobrien}
67190075Sobrien
67290075Sobrien/* Blocks A and B are to be merged into a single block.  B has no outgoing
67390075Sobrien   fallthru edge, so it can be moved after A without adding or modifying
67490075Sobrien   any jumps (aside from the jump from A to B).  */
67590075Sobrien
67690075Sobrienstatic void
677132718Skanmerge_blocks_move_successor_nojumps (basic_block a, basic_block b)
67890075Sobrien{
67990075Sobrien  rtx barrier, real_b_end;
680132718Skan  rtx label, table;
681169689Skan  bool only_notes;
68290075Sobrien
683169689Skan  /* If we are partitioning hot/cold basic blocks, we don't want to
684169689Skan     mess up unconditional or indirect jumps that cross between hot
685169689Skan     and cold sections.
686169689Skan
687169689Skan     Basic block partitioning may result in some jumps that appear to
688169689Skan     be optimizable (or blocks that appear to be mergeable), but which really
689169689Skan     must be left untouched (they are required to make it safely across
690169689Skan     partition boundaries).  See the comments at the top of
691169689Skan     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
692169689Skan
693169689Skan  if (BB_PARTITION (a) != BB_PARTITION (b))
694169689Skan    return;
695169689Skan
696132718Skan  real_b_end = BB_END (b);
69790075Sobrien
698132718Skan  /* If there is a jump table following block B temporarily add the jump table
699132718Skan     to block B so that it will also be moved to the correct location.  */
700132718Skan  if (tablejump_p (BB_END (b), &label, &table)
701132718Skan      && prev_active_insn (label) == BB_END (b))
70290075Sobrien    {
703132718Skan      BB_END (b) = table;
70490075Sobrien    }
70590075Sobrien
70690075Sobrien  /* There had better have been a barrier there.  Delete it.  */
707132718Skan  barrier = NEXT_INSN (BB_END (b));
708169689Skan  if (barrier && BARRIER_P (barrier))
70990075Sobrien    delete_insn (barrier);
71090075Sobrien
71190075Sobrien  /* Move block and loop notes out of the chain so that we do not
71290075Sobrien     disturb their order.
71390075Sobrien
71490075Sobrien     ??? A better solution would be to squeeze out all the non-nested notes
71590075Sobrien     and adjust the block trees appropriately.   Even better would be to have
71690075Sobrien     a tighter connection between block trees and rtl so that this is not
71790075Sobrien     necessary.  */
718169689Skan  only_notes = squeeze_notes (&BB_HEAD (b), &BB_END (b));
719169689Skan  gcc_assert (!only_notes);
72090075Sobrien
721169689Skan
72290075Sobrien  /* Scramble the insn chain.  */
723132718Skan  reorder_insns_nobb (BB_HEAD (b), BB_END (b), BB_END (a));
72490075Sobrien
72590075Sobrien  /* Restore the real end of b.  */
726132718Skan  BB_END (b) = real_b_end;
72790075Sobrien
728169689Skan  if (dump_file)
729169689Skan    fprintf (dump_file, "Moved block %d after %d and merged.\n",
73090075Sobrien	     b->index, a->index);
731117395Skan
732117395Skan  /* Now blocks A and B are contiguous.  Merge them.  */
733132718Skan  merge_blocks (a, b);
73490075Sobrien}
73590075Sobrien
73690075Sobrien/* Attempt to merge basic blocks that are potentially non-adjacent.
737132718Skan   Return NULL iff the attempt failed, otherwise return basic block
738132718Skan   where cleanup_cfg should continue.  Because the merging commonly
739132718Skan   moves basic block away or introduces another optimization
740132718Skan   possibility, return basic block just before B so cleanup_cfg doesn't
741132718Skan   need to iterate.
74290075Sobrien
743132718Skan   It may be a good idea to return basic block before C in the case
744132718Skan   C has been moved after B and originally appeared earlier in the
745132718Skan   insn sequence, but we have no information available about the
746132718Skan   relative ordering of these two.  Hopefully it is not too common.  */
747132718Skan
748132718Skanstatic basic_block
749132718Skanmerge_blocks_move (edge e, basic_block b, basic_block c, int mode)
75090075Sobrien{
751132718Skan  basic_block next;
752169689Skan
753169689Skan  /* If we are partitioning hot/cold basic blocks, we don't want to
754169689Skan     mess up unconditional or indirect jumps that cross between hot
755169689Skan     and cold sections.
756169689Skan
757169689Skan     Basic block partitioning may result in some jumps that appear to
758169689Skan     be optimizable (or blocks that appear to be mergeable), but which really
759169689Skan     must be left untouched (they are required to make it safely across
760169689Skan     partition boundaries).  See the comments at the top of
761169689Skan     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
762169689Skan
763169689Skan  if (BB_PARTITION (b) != BB_PARTITION (c))
764132718Skan    return NULL;
76590075Sobrien
766169689Skan
767169689Skan
76890075Sobrien  /* If B has a fallthru edge to C, no need to move anything.  */
76990075Sobrien  if (e->flags & EDGE_FALLTHRU)
77090075Sobrien    {
77190075Sobrien      int b_index = b->index, c_index = c->index;
772132718Skan      merge_blocks (b, c);
77390075Sobrien      update_forwarder_flag (b);
77490075Sobrien
775169689Skan      if (dump_file)
776169689Skan	fprintf (dump_file, "Merged %d and %d without moving.\n",
777117395Skan		 b_index, c_index);
77890075Sobrien
      /* Resume cleanup just before the merged block, but never at the
	 entry block itself.  */
779132718Skan      return b->prev_bb == ENTRY_BLOCK_PTR ? b : b->prev_bb;
78090075Sobrien    }
78190075Sobrien
78290075Sobrien  /* Otherwise we will need to move code around.  Do that only if expensive
78390075Sobrien     transformations are allowed.  */
78490075Sobrien  else if (mode & CLEANUP_EXPENSIVE)
78590075Sobrien    {
78690075Sobrien      edge tmp_edge, b_fallthru_edge;
78790075Sobrien      bool c_has_outgoing_fallthru;
78890075Sobrien      bool b_has_incoming_fallthru;
789169689Skan      edge_iterator ei;
79090075Sobrien
79190075Sobrien      /* Avoid overactive code motion, as the forwarder blocks should be
792169689Skan	 eliminated by edge redirection instead.  One exception might have
79390075Sobrien	 been if B is a forwarder block and C has no fallthru edge, but
79490075Sobrien	 that should be cleaned up by bb-reorder instead.  */
79590075Sobrien      if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))
796132718Skan	return NULL;
79790075Sobrien
79890075Sobrien      /* We must make sure to not munge nesting of lexical blocks,
79990075Sobrien	 and loop notes.  This is done by squeezing out all the notes
80090075Sobrien	 and leaving them there to lie.  Not ideal, but functional.  */
80190075Sobrien
      /* Does C fall through to some block?  If so it cannot simply be
	 appended after B.  */
802169689Skan      FOR_EACH_EDGE (tmp_edge, ei, c->succs)
80390075Sobrien	if (tmp_edge->flags & EDGE_FALLTHRU)
80490075Sobrien	  break;
80590075Sobrien
80690075Sobrien      c_has_outgoing_fallthru = (tmp_edge != NULL);
80790075Sobrien
      /* Does some block fall through into B?  If so B cannot simply be
	 prepended before C.  */
808169689Skan      FOR_EACH_EDGE (tmp_edge, ei, b->preds)
80990075Sobrien	if (tmp_edge->flags & EDGE_FALLTHRU)
81090075Sobrien	  break;
81190075Sobrien
81290075Sobrien      b_has_incoming_fallthru = (tmp_edge != NULL);
81390075Sobrien      b_fallthru_edge = tmp_edge;
      /* Pick the block from which cleanup_cfg should resume scanning;
	 skip C, which is about to be moved away from its current spot.  */
814132718Skan      next = b->prev_bb;
815132718Skan      if (next == c)
816132718Skan	next = next->prev_bb;
81790075Sobrien
81890075Sobrien      /* Otherwise, we're going to try to move C after B.  If C does
81990075Sobrien	 not have an outgoing fallthru, then it can be moved
82090075Sobrien	 immediately after B without introducing or modifying jumps.  */
82190075Sobrien      if (! c_has_outgoing_fallthru)
82290075Sobrien	{
82390075Sobrien	  merge_blocks_move_successor_nojumps (b, c);
824169689Skan	  return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
82590075Sobrien	}
82690075Sobrien
82790075Sobrien      /* If B does not have an incoming fallthru, then it can be moved
82890075Sobrien	 immediately before C without introducing or modifying jumps.
82990075Sobrien	 C cannot be the first block, so we do not have to worry about
83090075Sobrien	 accessing a non-existent block.  */
83190075Sobrien
83290075Sobrien      if (b_has_incoming_fallthru)
83390075Sobrien	{
83490075Sobrien	  basic_block bb;
83590075Sobrien
83690075Sobrien	  if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
837132718Skan	    return NULL;
	  /* Turn the incoming fallthru into an explicit jump so that B
	     becomes free to move in front of C.  */
83890075Sobrien	  bb = force_nonfallthru (b_fallthru_edge);
83990075Sobrien	  if (bb)
84090075Sobrien	    notice_new_block (bb);
84190075Sobrien	}
84290075Sobrien
84390075Sobrien      merge_blocks_move_predecessor_nojumps (b, c);
844132718Skan      return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
84590075Sobrien    }
84690075Sobrien
847132718Skan  return NULL;
84890075Sobrien}
84990075Sobrien
85090075Sobrien
851132718Skan/* Walk X and Y in parallel and reconcile the memory attributes of each
852132718Skan   pair of corresponding MEM subexpressions: attributes the two disagree
   on are dropped (alias set, expression, offset), the recorded size is
   widened to cover both, and the alignment is lowered to the smaller of
   the two.  Used when two insns are about to be merged by crossjumping,
   so the surviving copy must carry attributes valid for both.  */
853132718Skan
854132718Skanvoid
855132718Skanmerge_memattrs (rtx x, rtx y)
856132718Skan{
857132718Skan  int i;
858132718Skan  int j;
859132718Skan  enum rtx_code code;
860132718Skan  const char *fmt;
861132718Skan
862132718Skan  if (x == y)
863132718Skan    return;
864132718Skan  if (x == 0 || y == 0)
865132718Skan    return;
866132718Skan
867132718Skan  code = GET_CODE (x);
868132718Skan
869132718Skan  if (code != GET_CODE (y))
870132718Skan    return;
871132718Skan
872132718Skan  if (GET_MODE (x) != GET_MODE (y))
873132718Skan    return;
874132718Skan
875132718Skan  if (code == MEM && MEM_ATTRS (x) != MEM_ATTRS (y))
876132718Skan    {
      /* If either MEM carries no attributes at all, strip them from
	 both sides.  */
877132718Skan      if (! MEM_ATTRS (x))
878132718Skan	MEM_ATTRS (y) = 0;
879132718Skan      else if (! MEM_ATTRS (y))
880132718Skan	MEM_ATTRS (x) = 0;
881169689Skan      else
882132718Skan	{
883169689Skan	  rtx mem_size;
884169689Skan
885132718Skan	  if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
886132718Skan	    {
887132718Skan	      set_mem_alias_set (x, 0);
888132718Skan	      set_mem_alias_set (y, 0);
889132718Skan	    }
890169689Skan
891132718Skan	  if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
892132718Skan	    {
893132718Skan	      set_mem_expr (x, 0);
894132718Skan	      set_mem_expr (y, 0);
895132718Skan	      set_mem_offset (x, 0);
896132718Skan	      set_mem_offset (y, 0);
897132718Skan	    }
898132718Skan	  else if (MEM_OFFSET (x) != MEM_OFFSET (y))
899132718Skan	    {
900132718Skan	      set_mem_offset (x, 0);
901132718Skan	      set_mem_offset (y, 0);
902132718Skan	    }
903132718Skan
	  /* If either size is unknown, the merged size must be unknown;
	     otherwise keep the larger of the two so it covers both
	     accesses.  */
904169689Skan	  if (!MEM_SIZE (x))
905169689Skan	    mem_size = NULL_RTX;
906169689Skan	  else if (!MEM_SIZE (y))
907169689Skan	    mem_size = NULL_RTX;
908169689Skan	  else
909169689Skan	    mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)),
910169689Skan				     INTVAL (MEM_SIZE (y))));
911169689Skan	  set_mem_size (x, mem_size);
912169689Skan	  set_mem_size (y, mem_size);
913169689Skan
	  /* Keep only the weaker alignment guarantee.  */
914132718Skan	  set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
915132718Skan	  set_mem_align (y, MEM_ALIGN (x));
916132718Skan	}
917132718Skan    }
918169689Skan
  /* Recurse over the operands so that nested MEMs are reconciled too.  */
919132718Skan  fmt = GET_RTX_FORMAT (code);
920132718Skan  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
921132718Skan    {
922132718Skan      switch (fmt[i])
923132718Skan	{
924132718Skan	case 'E':
925132718Skan	  /* Two vectors must have the same length.  */
926132718Skan	  if (XVECLEN (x, i) != XVECLEN (y, i))
927132718Skan	    return;
928132718Skan
929132718Skan	  for (j = 0; j < XVECLEN (x, i); j++)
930132718Skan	    merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));
931132718Skan
932132718Skan	  break;
933132718Skan
934132718Skan	case 'e':
935132718Skan	  merge_memattrs (XEXP (x, i), XEXP (y, i));
936132718Skan	}
937132718Skan    }
938132718Skan  return;
939132718Skan}
940132718Skan
941132718Skan
94290075Sobrien/* Return true if I1 and I2 are equivalent and thus can be crossjumped.  */
/* MODE is only consulted inside the #ifdef STACK_REGS section below,
   which is why it is marked ATTRIBUTE_UNUSED.  */
94390075Sobrien
94490075Sobrienstatic bool
945169689Skanold_insns_match_p (int mode ATTRIBUTE_UNUSED, rtx i1, rtx i2)
94690075Sobrien{
94790075Sobrien  rtx p1, p2;
94890075Sobrien
94990075Sobrien  /* Verify that I1 and I2 are equivalent.  */
95090075Sobrien  if (GET_CODE (i1) != GET_CODE (i2))
95190075Sobrien    return false;
95290075Sobrien
95390075Sobrien  p1 = PATTERN (i1);
95490075Sobrien  p2 = PATTERN (i2);
95590075Sobrien
95690075Sobrien  if (GET_CODE (p1) != GET_CODE (p2))
95790075Sobrien    return false;
95890075Sobrien
95990075Sobrien  /* If this is a CALL_INSN, compare register usage information.
96090075Sobrien     If we don't check this on stack register machines, the two
96190075Sobrien     CALL_INSNs might be merged leaving reg-stack.c with mismatching
96290075Sobrien     numbers of stack registers in the same basic block.
96390075Sobrien     If we don't check this on machines with delay slots, a delay slot may
96490075Sobrien     be filled that clobbers a parameter expected by the subroutine.
96590075Sobrien
96690075Sobrien     ??? We take the simple route for now and assume that if they're
96790075Sobrien     equal, they were constructed identically.  */
96890075Sobrien
969169689Skan  if (CALL_P (i1)
970117395Skan      && (!rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
971169689Skan			CALL_INSN_FUNCTION_USAGE (i2))
972117395Skan	  || SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)))
97390075Sobrien    return false;
97490075Sobrien
97590075Sobrien#ifdef STACK_REGS
97690075Sobrien  /* If cross_jump_death_matters is not 0, the insn's mode
97790075Sobrien     indicates whether or not the insn contains any stack-like
97890075Sobrien     regs.  */
97990075Sobrien
98090075Sobrien  if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
98190075Sobrien    {
98290075Sobrien      /* If register stack conversion has already been done, then
983169689Skan	 death notes must also be compared before it is certain that
984169689Skan	 the two instruction streams match.  */
98590075Sobrien
98690075Sobrien      rtx note;
98790075Sobrien      HARD_REG_SET i1_regset, i2_regset;
98890075Sobrien
98990075Sobrien      CLEAR_HARD_REG_SET (i1_regset);
99090075Sobrien      CLEAR_HARD_REG_SET (i2_regset);
99190075Sobrien
      /* Collect the stack registers that die in each insn and require
	 the two sets to be identical.  */
99290075Sobrien      for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
99390075Sobrien	if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
99490075Sobrien	  SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
99590075Sobrien
99690075Sobrien      for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
99790075Sobrien	if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
99890075Sobrien	  SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
99990075Sobrien
      /* GO_IF_HARD_REG_EQUAL branches to "done" when the sets match.  */
100090075Sobrien      GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
100190075Sobrien
100290075Sobrien      return false;
100390075Sobrien
100490075Sobrien    done:
100590075Sobrien      ;
100690075Sobrien    }
100790075Sobrien#endif
100890075Sobrien
  /* After reload, compare modulo hard register renumbering; before
     reload, require structural equality.  */
100990075Sobrien  if (reload_completed
1010132718Skan      ? rtx_renumbered_equal_p (p1, p2) : rtx_equal_p (p1, p2))
1011132718Skan    return true;
1012132718Skan
1013132718Skan  /* Do not do EQUIV substitution after reload.  First, we're undoing the
1014132718Skan     work of reload_cse.  Second, we may be undoing the work of the post-
1015132718Skan     reload splitting pass.  */
1016132718Skan  /* ??? Possibly add a new phase switch variable that can be used by
1017132718Skan     targets to disallow the troublesome insns after splitting.  */
1018132718Skan  if (!reload_completed)
101990075Sobrien    {
102090075Sobrien      /* The following code helps take care of G++ cleanups.  */
102190075Sobrien      rtx equiv1 = find_reg_equal_equiv_note (i1);
102290075Sobrien      rtx equiv2 = find_reg_equal_equiv_note (i2);
102390075Sobrien
102490075Sobrien      if (equiv1 && equiv2
102590075Sobrien	  /* If the equivalences are not to a constant, they may
102690075Sobrien	     reference pseudos that no longer exist, so we can't
102790075Sobrien	     use them.  */
102890075Sobrien	  && (! reload_completed
102990075Sobrien	      || (CONSTANT_P (XEXP (equiv1, 0))
103090075Sobrien		  && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
103190075Sobrien	{
103290075Sobrien	  rtx s1 = single_set (i1);
103390075Sobrien	  rtx s2 = single_set (i2);
103490075Sobrien	  if (s1 != 0 && s2 != 0
103590075Sobrien	      && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
103690075Sobrien	    {
	      /* Tentatively substitute the equivalent values for the
		 sources; keep the change only if the patterns then
		 match and the substitutions validate.  */
103790075Sobrien	      validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
103890075Sobrien	      validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
103990075Sobrien	      if (! rtx_renumbered_equal_p (p1, p2))
104090075Sobrien		cancel_changes (0);
104190075Sobrien	      else if (apply_change_group ())
104290075Sobrien		return true;
104390075Sobrien	    }
104490075Sobrien	}
104590075Sobrien    }
104690075Sobrien
1047132718Skan  return false;
104890075Sobrien}
104990075Sobrien
105090075Sobrien/* Look through the insns at the end of BB1 and BB2 and find the longest
105190075Sobrien   sequence that are equivalent.  Store the first insns for that sequence
105290075Sobrien   in *F1 and *F2 and return the sequence length.
105390075Sobrien
105490075Sobrien   To simplify callers of this function, if the blocks match exactly,
105590075Sobrien   store the head of the blocks in *F1 and *F2.  */
105690075Sobrien
105790075Sobrienstatic int
1058132718Skanflow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
1059132718Skan		      basic_block bb2, rtx *f1, rtx *f2)
106090075Sobrien{
  /* LAST1/LAST2 are the first insns of the matched run found so far;
     AFTERLAST1/AFTERLAST2 remember the previous pair so the match can
     be backed off by one insn (see the cc0 check below).  */
106190075Sobrien  rtx i1, i2, last1, last2, afterlast1, afterlast2;
106290075Sobrien  int ninsns = 0;
106390075Sobrien
106490075Sobrien  /* Skip simple jumps at the end of the blocks.  Complex jumps still
106590075Sobrien     need to be compared for equivalence, which we'll do below.  */
106690075Sobrien
1067132718Skan  i1 = BB_END (bb1);
106890075Sobrien  last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
106990075Sobrien  if (onlyjump_p (i1)
107090075Sobrien      || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
107190075Sobrien    {
107290075Sobrien      last1 = i1;
107390075Sobrien      i1 = PREV_INSN (i1);
107490075Sobrien    }
107590075Sobrien
1076132718Skan  i2 = BB_END (bb2);
107790075Sobrien  if (onlyjump_p (i2)
107890075Sobrien      || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
107990075Sobrien    {
108090075Sobrien      last2 = i2;
108190075Sobrien      /* Count everything except for unconditional jump as insn.  */
108290075Sobrien      if (!simplejump_p (i2) && !returnjump_p (i2) && last1)
108390075Sobrien	ninsns++;
108490075Sobrien      i2 = PREV_INSN (i2);
108590075Sobrien    }
108690075Sobrien
  /* Walk both blocks backwards in lockstep, extending the matched
     region one insn at a time until a mismatch or a block head.  */
108790075Sobrien  while (true)
108890075Sobrien    {
108990075Sobrien      /* Ignore notes.  */
1090132718Skan      while (!INSN_P (i1) && i1 != BB_HEAD (bb1))
109190075Sobrien	i1 = PREV_INSN (i1);
109290075Sobrien
1093132718Skan      while (!INSN_P (i2) && i2 != BB_HEAD (bb2))
109490075Sobrien	i2 = PREV_INSN (i2);
109590075Sobrien
1096132718Skan      if (i1 == BB_HEAD (bb1) || i2 == BB_HEAD (bb2))
109790075Sobrien	break;
109890075Sobrien
1099169689Skan      if (!old_insns_match_p (mode, i1, i2))
110090075Sobrien	break;
110190075Sobrien
      /* The insns will be merged; make their memory attributes
	 compatible before that happens.  */
1102132718Skan      merge_memattrs (i1, i2);
1103132718Skan
1104117395Skan      /* Don't begin a cross-jump with a NOTE insn.  */
1105117395Skan      if (INSN_P (i1))
110690075Sobrien	{
110790075Sobrien	  /* If the merged insns have different REG_EQUAL notes, then
110890075Sobrien	     remove them.  */
110990075Sobrien	  rtx equiv1 = find_reg_equal_equiv_note (i1);
111090075Sobrien	  rtx equiv2 = find_reg_equal_equiv_note (i2);
111190075Sobrien
111290075Sobrien	  if (equiv1 && !equiv2)
111390075Sobrien	    remove_note (i1, equiv1);
111490075Sobrien	  else if (!equiv1 && equiv2)
111590075Sobrien	    remove_note (i2, equiv2);
111690075Sobrien	  else if (equiv1 && equiv2
111790075Sobrien		   && !rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
111890075Sobrien	    {
111990075Sobrien	      remove_note (i1, equiv1);
112090075Sobrien	      remove_note (i2, equiv2);
112190075Sobrien	    }
1122117395Skan
112390075Sobrien	  afterlast1 = last1, afterlast2 = last2;
112490075Sobrien	  last1 = i1, last2 = i2;
1125117395Skan	  ninsns++;
112690075Sobrien	}
112790075Sobrien
112890075Sobrien      i1 = PREV_INSN (i1);
112990075Sobrien      i2 = PREV_INSN (i2);
113090075Sobrien    }
113190075Sobrien
113290075Sobrien#ifdef HAVE_cc0
113390075Sobrien  /* Don't allow the insn after a compare to be shared by
113490075Sobrien     cross-jumping unless the compare is also shared.  */
113590075Sobrien  if (ninsns && reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
113690075Sobrien    last1 = afterlast1, last2 = afterlast2, ninsns--;
113790075Sobrien#endif
113890075Sobrien
113990075Sobrien  /* Include preceding notes and labels in the cross-jump.  One,
114090075Sobrien     this may bring us to the head of the blocks as requested above.
114190075Sobrien     Two, it keeps line number notes as matched as may be.  */
114290075Sobrien  if (ninsns)
114390075Sobrien    {
1144132718Skan      while (last1 != BB_HEAD (bb1) && !INSN_P (PREV_INSN (last1)))
114590075Sobrien	last1 = PREV_INSN (last1);
114690075Sobrien
1147169689Skan      if (last1 != BB_HEAD (bb1) && LABEL_P (PREV_INSN (last1)))
114890075Sobrien	last1 = PREV_INSN (last1);
114990075Sobrien
1150132718Skan      while (last2 != BB_HEAD (bb2) && !INSN_P (PREV_INSN (last2)))
115190075Sobrien	last2 = PREV_INSN (last2);
115290075Sobrien
1153169689Skan      if (last2 != BB_HEAD (bb2) && LABEL_P (PREV_INSN (last2)))
115490075Sobrien	last2 = PREV_INSN (last2);
115590075Sobrien
115690075Sobrien      *f1 = last1;
115790075Sobrien      *f2 = last2;
115890075Sobrien    }
115990075Sobrien
116090075Sobrien  return ninsns;
116190075Sobrien}
116290075Sobrien
1163169689Skan/* Return true iff the condbranches at the end of BB1 and BB2 match.  */
/* INFO supplies the two blocks being compared (INFO->x_block and
   INFO->y_block) and accumulates the struct_equiv comparison state.
   When CALL_INIT is true the state is (re)initialized here first.
   On a false return any tentative changes are cancelled.  */
1164169689Skanbool
1165169689Skancondjump_equiv_p (struct equiv_info *info, bool call_init)
1166169689Skan{
1167169689Skan  basic_block bb1 = info->x_block;
1168169689Skan  basic_block bb2 = info->y_block;
1169169689Skan  edge b1 = BRANCH_EDGE (bb1);
1170169689Skan  edge b2 = BRANCH_EDGE (bb2);
1171169689Skan  edge f1 = FALLTHRU_EDGE (bb1);
1172169689Skan  edge f2 = FALLTHRU_EDGE (bb2);
1173169689Skan  bool reverse, match;
1174169689Skan  rtx set1, set2, cond1, cond2;
1175169689Skan  rtx src1, src2;
1176169689Skan  enum rtx_code code1, code2;
1177169689Skan
1178169689Skan  /* Get around possible forwarders on fallthru edges.  Other cases
1179169689Skan     should be optimized out already.  */
1180169689Skan  if (FORWARDER_BLOCK_P (f1->dest))
1181169689Skan    f1 = single_succ_edge (f1->dest);
1182169689Skan
1183169689Skan  if (FORWARDER_BLOCK_P (f2->dest))
1184169689Skan    f2 = single_succ_edge (f2->dest);
1185169689Skan
1186169689Skan  /* To simplify use of this function, return false if there are
1187169689Skan     unneeded forwarder blocks.  These will get eliminated later
1188169689Skan     during cleanup_cfg.  */
1189169689Skan  if (FORWARDER_BLOCK_P (f1->dest)
1190169689Skan      || FORWARDER_BLOCK_P (f2->dest)
1191169689Skan      || FORWARDER_BLOCK_P (b1->dest)
1192169689Skan      || FORWARDER_BLOCK_P (b2->dest))
1193169689Skan    return false;
1194169689Skan
  /* Determine whether the two branches go to the same destinations
     directly or with branch/fallthru swapped (REVERSE).  */
1195169689Skan  if (f1->dest == f2->dest && b1->dest == b2->dest)
1196169689Skan    reverse = false;
1197169689Skan  else if (f1->dest == b2->dest && b1->dest == f2->dest)
1198169689Skan    reverse = true;
1199169689Skan  else
1200169689Skan    return false;
1201169689Skan
1202169689Skan  set1 = pc_set (BB_END (bb1));
1203169689Skan  set2 = pc_set (BB_END (bb2));
1204169689Skan  if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
1205169689Skan      != (XEXP (SET_SRC (set2), 1) == pc_rtx))
1206169689Skan    reverse = !reverse;
1207169689Skan
1208169689Skan  src1 = SET_SRC (set1);
1209169689Skan  src2 = SET_SRC (set2);
1210169689Skan  cond1 = XEXP (src1, 0);
1211169689Skan  cond2 = XEXP (src2, 0);
1212169689Skan  code1 = GET_CODE (cond1);
1213169689Skan  if (reverse)
1214169689Skan    code2 = reversed_comparison_code (cond2, BB_END (bb2));
1215169689Skan  else
1216169689Skan    code2 = GET_CODE (cond2);
1217169689Skan
1218169689Skan  if (code2 == UNKNOWN)
1219169689Skan    return false;
1220169689Skan
1221169689Skan  if (call_init && !struct_equiv_init (STRUCT_EQUIV_START | info->mode, info))
1222169689Skan    gcc_unreachable ();
1223169689Skan  /* Make the sources of the pc sets unreadable so that when we call
1224169689Skan     insns_match_p it won't process them.
1225169689Skan     The death_notes_match_p from insns_match_p won't see the local registers
1226169689Skan     used for the pc set, but that could only cause missed optimizations when
1227169689Skan     there are actually condjumps that use stack registers.  */
1228169689Skan  SET_SRC (set1) = pc_rtx;
1229169689Skan  SET_SRC (set2) = pc_rtx;
1230169689Skan  /* Verify codes and operands match.  */
1231169689Skan  if (code1 == code2)
1232169689Skan    {
1233169689Skan      match = (insns_match_p (BB_END (bb1), BB_END (bb2), info)
1234169689Skan	       && rtx_equiv_p (&XEXP (cond1, 0), XEXP (cond2, 0), 1, info)
1235169689Skan	       && rtx_equiv_p (&XEXP (cond1, 1), XEXP (cond2, 1), 1, info));
1236169689Skan
1237169689Skan    }
1238169689Skan  else if (code1 == swap_condition (code2))
1239169689Skan    {
      /* Swapped condition codes match when the operands are compared
	 crosswise.  */
1240169689Skan      match = (insns_match_p (BB_END (bb1), BB_END (bb2), info)
1241169689Skan	       && rtx_equiv_p (&XEXP (cond1, 1), XEXP (cond2, 0), 1, info)
1242169689Skan	       && rtx_equiv_p (&XEXP (cond1, 0), XEXP (cond2, 1), 1, info));
1243169689Skan
1244169689Skan    }
1245169689Skan  else
1246169689Skan    match = false;
  /* Restore the pc sets that were masked above.  */
1247169689Skan  SET_SRC (set1) = src1;
1248169689Skan  SET_SRC (set2) = src2;
  /* Also require that any changes queued during the comparison are
     valid replacements.  */
1249169689Skan  match &= verify_changes (0);
1250169689Skan
1251169689Skan  /* If we return true, we will join the blocks.  Which means that
1252169689Skan     we will only have one branch prediction bit to work with.  Thus
1253169689Skan     we require the existing branches to have probabilities that are
1254169689Skan     roughly similar.  */
1255169689Skan  if (match
1256169689Skan      && !optimize_size
1257169689Skan      && maybe_hot_bb_p (bb1)
1258169689Skan      && maybe_hot_bb_p (bb2))
1259169689Skan    {
1260169689Skan      int prob2;
1261169689Skan
1262169689Skan      if (b1->dest == b2->dest)
1263169689Skan	prob2 = b2->probability;
1264169689Skan      else
1265169689Skan	/* Do not use f2 probability as f2 may be forwarded.  */
1266169689Skan	prob2 = REG_BR_PROB_BASE - b2->probability;
1267169689Skan
1268169689Skan      /* Fail if the difference in probabilities is greater than 50%.
1269169689Skan	 This rules out two well-predicted branches with opposite
1270169689Skan	 outcomes.  */
1271169689Skan      if (abs (b1->probability - prob2) > REG_BR_PROB_BASE / 2)
1272169689Skan	{
1273169689Skan	  if (dump_file)
1274169689Skan	    fprintf (dump_file,
1275169689Skan		     "Outcomes of branch in bb %i and %i differ too much (%i %i)\n",
1276169689Skan		     bb1->index, bb2->index, b1->probability, prob2);
1277169689Skan
1278169689Skan	  match = false;
1279169689Skan	}
1280169689Skan    }
1281169689Skan
1282169689Skan  if (dump_file && match)
1283169689Skan    fprintf (dump_file, "Conditionals in bb %i and %i match.\n",
1284169689Skan	     bb1->index, bb2->index);
1285169689Skan
1286169689Skan  if (!match)
1287169689Skan    cancel_changes (0);
1288169689Skan  return match;
1289169689Skan}
1290169689Skan
129190075Sobrien/* Return true iff outgoing edges of BB1 and BB2 match, together with
129290075Sobrien   the branch instruction.  This means that if we commonize the control
129390075Sobrien   flow before end of the basic block, the semantic remains unchanged.
129490075Sobrien
129590075Sobrien   We may assume that there exists one edge with a common destination.  */
129690075Sobrien
129790075Sobrienstatic bool
1298132718Skanoutgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
129990075Sobrien{
130090075Sobrien  int nehedges1 = 0, nehedges2 = 0;
130190075Sobrien  edge fallthru1 = 0, fallthru2 = 0;
130290075Sobrien  edge e1, e2;
1303169689Skan  edge_iterator ei;
130490075Sobrien
130590075Sobrien  /* If BB1 has only one successor, we may be looking at either an
130690075Sobrien     unconditional jump, or a fake edge to exit.  */
1307169689Skan  if (single_succ_p (bb1)
1308169689Skan      && (single_succ_edge (bb1)->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
1309169689Skan      && (!JUMP_P (BB_END (bb1)) || simplejump_p (BB_END (bb1))))
1310169689Skan    return (single_succ_p (bb2)
1311169689Skan	    && (single_succ_edge (bb2)->flags
1312169689Skan		& (EDGE_COMPLEX | EDGE_FAKE)) == 0
1313169689Skan	    && (!JUMP_P (BB_END (bb2)) || simplejump_p (BB_END (bb2))));
131490075Sobrien
131590075Sobrien  /* Match conditional jumps - this may get tricky when fallthru and branch
131690075Sobrien     edges are crossed.  */
1317169689Skan  if (EDGE_COUNT (bb1->succs) == 2
1318132718Skan      && any_condjump_p (BB_END (bb1))
1319132718Skan      && onlyjump_p (BB_END (bb1)))
132090075Sobrien    {
132190075Sobrien      edge b1, f1, b2, f2;
132290075Sobrien      bool reverse, match;
132390075Sobrien      rtx set1, set2, cond1, cond2;
132490075Sobrien      enum rtx_code code1, code2;
132590075Sobrien
1326169689Skan      if (EDGE_COUNT (bb2->succs) != 2
1327132718Skan	  || !any_condjump_p (BB_END (bb2))
1328132718Skan	  || !onlyjump_p (BB_END (bb2)))
132990075Sobrien	return false;
133090075Sobrien
133190075Sobrien      b1 = BRANCH_EDGE (bb1);
133290075Sobrien      b2 = BRANCH_EDGE (bb2);
133390075Sobrien      f1 = FALLTHRU_EDGE (bb1);
133490075Sobrien      f2 = FALLTHRU_EDGE (bb2);
133590075Sobrien
133690075Sobrien      /* Get around possible forwarders on fallthru edges.  Other cases
1337169689Skan	 should be optimized out already.  */
133890075Sobrien      if (FORWARDER_BLOCK_P (f1->dest))
1339169689Skan	f1 = single_succ_edge (f1->dest);
134090075Sobrien
134190075Sobrien      if (FORWARDER_BLOCK_P (f2->dest))
1342169689Skan	f2 = single_succ_edge (f2->dest);
134390075Sobrien
134490075Sobrien      /* To simplify use of this function, return false if there are
134590075Sobrien	 unneeded forwarder blocks.  These will get eliminated later
134690075Sobrien	 during cleanup_cfg.  */
134790075Sobrien      if (FORWARDER_BLOCK_P (f1->dest)
134890075Sobrien	  || FORWARDER_BLOCK_P (f2->dest)
134990075Sobrien	  || FORWARDER_BLOCK_P (b1->dest)
135090075Sobrien	  || FORWARDER_BLOCK_P (b2->dest))
135190075Sobrien	return false;
135290075Sobrien
135390075Sobrien      if (f1->dest == f2->dest && b1->dest == b2->dest)
135490075Sobrien	reverse = false;
135590075Sobrien      else if (f1->dest == b2->dest && b1->dest == f2->dest)
135690075Sobrien	reverse = true;
135790075Sobrien      else
135890075Sobrien	return false;
135990075Sobrien
1360132718Skan      set1 = pc_set (BB_END (bb1));
1361132718Skan      set2 = pc_set (BB_END (bb2));
136290075Sobrien      if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
136390075Sobrien	  != (XEXP (SET_SRC (set2), 1) == pc_rtx))
136490075Sobrien	reverse = !reverse;
136590075Sobrien
136690075Sobrien      cond1 = XEXP (SET_SRC (set1), 0);
136790075Sobrien      cond2 = XEXP (SET_SRC (set2), 0);
136890075Sobrien      code1 = GET_CODE (cond1);
136990075Sobrien      if (reverse)
1370132718Skan	code2 = reversed_comparison_code (cond2, BB_END (bb2));
137190075Sobrien      else
137290075Sobrien	code2 = GET_CODE (cond2);
137390075Sobrien
137490075Sobrien      if (code2 == UNKNOWN)
137590075Sobrien	return false;
137690075Sobrien
137790075Sobrien      /* Verify codes and operands match.  */
137890075Sobrien      match = ((code1 == code2
137990075Sobrien		&& rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
138090075Sobrien		&& rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
138190075Sobrien	       || (code1 == swap_condition (code2)
138290075Sobrien		   && rtx_renumbered_equal_p (XEXP (cond1, 1),
138390075Sobrien					      XEXP (cond2, 0))
138490075Sobrien		   && rtx_renumbered_equal_p (XEXP (cond1, 0),
138590075Sobrien					      XEXP (cond2, 1))));
138690075Sobrien
138790075Sobrien      /* If we return true, we will join the blocks.  Which means that
138890075Sobrien	 we will only have one branch prediction bit to work with.  Thus
138990075Sobrien	 we require the existing branches to have probabilities that are
139090075Sobrien	 roughly similar.  */
139190075Sobrien      if (match
139290075Sobrien	  && !optimize_size
1393117395Skan	  && maybe_hot_bb_p (bb1)
1394117395Skan	  && maybe_hot_bb_p (bb2))
139590075Sobrien	{
139690075Sobrien	  int prob2;
139790075Sobrien
139890075Sobrien	  if (b1->dest == b2->dest)
139990075Sobrien	    prob2 = b2->probability;
140090075Sobrien	  else
140190075Sobrien	    /* Do not use f2 probability as f2 may be forwarded.  */
140290075Sobrien	    prob2 = REG_BR_PROB_BASE - b2->probability;
140390075Sobrien
140496263Sobrien	  /* Fail if the difference in probabilities is greater than 50%.
140596263Sobrien	     This rules out two well-predicted branches with opposite
140696263Sobrien	     outcomes.  */
140796263Sobrien	  if (abs (b1->probability - prob2) > REG_BR_PROB_BASE / 2)
140890075Sobrien	    {
1409169689Skan	      if (dump_file)
1410169689Skan		fprintf (dump_file,
1411169689Skan			 "Outcomes of branch in bb %i and %i differ too much (%i %i)\n",
141290075Sobrien			 bb1->index, bb2->index, b1->probability, prob2);
141390075Sobrien
141490075Sobrien	      return false;
141590075Sobrien	    }
141690075Sobrien	}
141790075Sobrien
1418169689Skan      if (dump_file && match)
1419169689Skan	fprintf (dump_file, "Conditionals in bb %i and %i match.\n",
142090075Sobrien		 bb1->index, bb2->index);
142190075Sobrien
142290075Sobrien      return match;
142390075Sobrien    }
142490075Sobrien
1425117395Skan  /* Generic case - we are seeing a computed jump, table jump or trapping
142690075Sobrien     instruction.  */
142790075Sobrien
1428132718Skan  /* Check whether there are tablejumps in the end of BB1 and BB2.
1429132718Skan     Return true if they are identical.  */
1430132718Skan    {
1431132718Skan      rtx label1, label2;
1432132718Skan      rtx table1, table2;
1433132718Skan
1434132718Skan      if (tablejump_p (BB_END (bb1), &label1, &table1)
1435132718Skan	  && tablejump_p (BB_END (bb2), &label2, &table2)
1436132718Skan	  && GET_CODE (PATTERN (table1)) == GET_CODE (PATTERN (table2)))
1437132718Skan	{
1438132718Skan	  /* The labels should never be the same rtx.  If they really are same
1439132718Skan	     the jump tables are same too. So disable crossjumping of blocks BB1
1440132718Skan	     and BB2 because when deleting the common insns in the end of BB1
1441169689Skan	     by delete_basic_block () the jump table would be deleted too.  */
1442132718Skan	  /* If LABEL2 is referenced in BB1->END do not do anything
1443132718Skan	     because we would loose information when replacing
1444132718Skan	     LABEL1 by LABEL2 and then LABEL2 by LABEL1 in BB1->END.  */
1445132718Skan	  if (label1 != label2 && !rtx_referenced_p (label2, BB_END (bb1)))
1446132718Skan	    {
1447132718Skan	      /* Set IDENTICAL to true when the tables are identical.  */
1448132718Skan	      bool identical = false;
1449132718Skan	      rtx p1, p2;
1450132718Skan
1451132718Skan	      p1 = PATTERN (table1);
1452132718Skan	      p2 = PATTERN (table2);
1453132718Skan	      if (GET_CODE (p1) == ADDR_VEC && rtx_equal_p (p1, p2))
1454132718Skan		{
1455132718Skan		  identical = true;
1456132718Skan		}
1457132718Skan	      else if (GET_CODE (p1) == ADDR_DIFF_VEC
1458132718Skan		       && (XVECLEN (p1, 1) == XVECLEN (p2, 1))
1459132718Skan		       && rtx_equal_p (XEXP (p1, 2), XEXP (p2, 2))
1460132718Skan		       && rtx_equal_p (XEXP (p1, 3), XEXP (p2, 3)))
1461132718Skan		{
1462132718Skan		  int i;
1463132718Skan
1464132718Skan		  identical = true;
1465132718Skan		  for (i = XVECLEN (p1, 1) - 1; i >= 0 && identical; i--)
1466132718Skan		    if (!rtx_equal_p (XVECEXP (p1, 1, i), XVECEXP (p2, 1, i)))
1467132718Skan		      identical = false;
1468132718Skan		}
1469132718Skan
1470132718Skan	      if (identical)
1471132718Skan		{
1472132718Skan		  replace_label_data rr;
1473132718Skan		  bool match;
1474132718Skan
1475132718Skan		  /* Temporarily replace references to LABEL1 with LABEL2
1476132718Skan		     in BB1->END so that we could compare the instructions.  */
1477132718Skan		  rr.r1 = label1;
1478132718Skan		  rr.r2 = label2;
1479132718Skan		  rr.update_label_nuses = false;
1480132718Skan		  for_each_rtx (&BB_END (bb1), replace_label, &rr);
1481132718Skan
1482169689Skan		  match = old_insns_match_p (mode, BB_END (bb1), BB_END (bb2));
1483169689Skan		  if (dump_file && match)
1484169689Skan		    fprintf (dump_file,
1485132718Skan			     "Tablejumps in bb %i and %i match.\n",
1486132718Skan			     bb1->index, bb2->index);
1487132718Skan
1488132718Skan		  /* Set the original label in BB1->END because when deleting
1489132718Skan		     a block whose end is a tablejump, the tablejump referenced
1490132718Skan		     from the instruction is deleted too.  */
1491132718Skan		  rr.r1 = label2;
1492132718Skan		  rr.r2 = label1;
1493132718Skan		  for_each_rtx (&BB_END (bb1), replace_label, &rr);
1494132718Skan
1495132718Skan		  return match;
1496132718Skan		}
1497132718Skan	    }
1498132718Skan	  return false;
1499132718Skan	}
1500132718Skan    }
1501132718Skan
150290075Sobrien  /* First ensure that the instructions match.  There may be many outgoing
1503132718Skan     edges so this test is generally cheaper.  */
1504169689Skan  if (!old_insns_match_p (mode, BB_END (bb1), BB_END (bb2)))
150590075Sobrien    return false;
150690075Sobrien
150790075Sobrien  /* Search the outgoing edges, ensure that the counts do match, find possible
150890075Sobrien     fallthru and exception handling edges since these needs more
150990075Sobrien     validation.  */
1510169689Skan  if (EDGE_COUNT (bb1->succs) != EDGE_COUNT (bb2->succs))
1511169689Skan    return false;
1512169689Skan
1513169689Skan  FOR_EACH_EDGE (e1, ei, bb1->succs)
151490075Sobrien    {
1515169689Skan      e2 = EDGE_SUCC (bb2, ei.index);
1516169689Skan
151790075Sobrien      if (e1->flags & EDGE_EH)
151890075Sobrien	nehedges1++;
151990075Sobrien
152090075Sobrien      if (e2->flags & EDGE_EH)
152190075Sobrien	nehedges2++;
152290075Sobrien
152390075Sobrien      if (e1->flags & EDGE_FALLTHRU)
152490075Sobrien	fallthru1 = e1;
152590075Sobrien      if (e2->flags & EDGE_FALLTHRU)
152690075Sobrien	fallthru2 = e2;
152790075Sobrien    }
152890075Sobrien
152990075Sobrien  /* If number of edges of various types does not match, fail.  */
1530169689Skan  if (nehedges1 != nehedges2
153190075Sobrien      || (fallthru1 != 0) != (fallthru2 != 0))
153290075Sobrien    return false;
153390075Sobrien
153490075Sobrien  /* fallthru edges must be forwarded to the same destination.  */
153590075Sobrien  if (fallthru1)
153690075Sobrien    {
153790075Sobrien      basic_block d1 = (forwarder_block_p (fallthru1->dest)
1538169689Skan			? single_succ (fallthru1->dest): fallthru1->dest);
153990075Sobrien      basic_block d2 = (forwarder_block_p (fallthru2->dest)
1540169689Skan			? single_succ (fallthru2->dest): fallthru2->dest);
154190075Sobrien
154290075Sobrien      if (d1 != d2)
154390075Sobrien	return false;
154490075Sobrien    }
154590075Sobrien
1546132718Skan  /* Ensure the same EH region.  */
1547132718Skan  {
1548132718Skan    rtx n1 = find_reg_note (BB_END (bb1), REG_EH_REGION, 0);
1549132718Skan    rtx n2 = find_reg_note (BB_END (bb2), REG_EH_REGION, 0);
155090075Sobrien
1551132718Skan    if (!n1 && n2)
1552132718Skan      return false;
155390075Sobrien
1554132718Skan    if (n1 && (!n2 || XEXP (n1, 0) != XEXP (n2, 0)))
1555132718Skan      return false;
1556132718Skan  }
1557132718Skan
1558169689Skan  /* The same checks as in try_crossjump_to_edge. It is required for RTL
1559169689Skan     version of sequence abstraction.  */
1560169689Skan  FOR_EACH_EDGE (e1, ei, bb2->succs)
1561169689Skan    {
1562169689Skan      edge e2;
1563169689Skan      edge_iterator ei;
1564169689Skan      basic_block d1 = e1->dest;
1565169689Skan
1566169689Skan      if (FORWARDER_BLOCK_P (d1))
1567169689Skan        d1 = EDGE_SUCC (d1, 0)->dest;
1568169689Skan
1569169689Skan      FOR_EACH_EDGE (e2, ei, bb1->succs)
1570169689Skan        {
1571169689Skan          basic_block d2 = e2->dest;
1572169689Skan          if (FORWARDER_BLOCK_P (d2))
1573169689Skan            d2 = EDGE_SUCC (d2, 0)->dest;
1574169689Skan          if (d1 == d2)
1575169689Skan            break;
1576169689Skan        }
1577169689Skan
1578169689Skan      if (!e2)
1579169689Skan        return false;
1580169689Skan    }
1581169689Skan
158290075Sobrien  return true;
158390075Sobrien}
158490075Sobrien
1585169689Skan/* Returns true if BB basic block has a preserve label.  */
1586169689Skan
1587169689Skanstatic bool
1588169689Skanblock_has_preserve_label (basic_block bb)
1589169689Skan{
1590169689Skan  return (bb
1591169689Skan          && block_label (bb)
1592169689Skan          && LABEL_PRESERVE_P (block_label (bb)));
1593169689Skan}
1594169689Skan
159590075Sobrien/* E1 and E2 are edges with the same destination block.  Search their
159690075Sobrien   predecessors for common code.  If found, redirect control flow from
159790075Sobrien   (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC.  */
159890075Sobrien
159990075Sobrienstatic bool
1600132718Skantry_crossjump_to_edge (int mode, edge e1, edge e2)
160190075Sobrien{
160290075Sobrien  int nmatch;
160390075Sobrien  basic_block src1 = e1->src, src2 = e2->src;
1604117395Skan  basic_block redirect_to, redirect_from, to_remove;
160590075Sobrien  rtx newpos1, newpos2;
160690075Sobrien  edge s;
1607169689Skan  edge_iterator ei;
160890075Sobrien
1609169689Skan  newpos1 = newpos2 = NULL_RTX;
1610169689Skan
1611169689Skan  /* If we have partitioned hot/cold basic blocks, it is a bad idea
1612169689Skan     to try this optimization.
1613169689Skan
1614169689Skan     Basic block partitioning may result in some jumps that appear to
1615169689Skan     be optimizable (or blocks that appear to be mergeable), but which really
1616169689Skan     must be left untouched (they are required to make it safely across
1617169689Skan     partition boundaries).  See the comments at the top of
1618169689Skan     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
1619169689Skan
1620169689Skan  if (flag_reorder_blocks_and_partition && no_new_pseudos)
1621169689Skan    return false;
1622169689Skan
162390075Sobrien  /* Search backward through forwarder blocks.  We don't need to worry
162490075Sobrien     about multiple entry or chained forwarders, as they will be optimized
162590075Sobrien     away.  We do this to look past the unconditional jump following a
162690075Sobrien     conditional jump that is required due to the current CFG shape.  */
1627169689Skan  if (single_pred_p (src1)
162890075Sobrien      && FORWARDER_BLOCK_P (src1))
1629169689Skan    e1 = single_pred_edge (src1), src1 = e1->src;
163090075Sobrien
1631169689Skan  if (single_pred_p (src2)
163290075Sobrien      && FORWARDER_BLOCK_P (src2))
1633169689Skan    e2 = single_pred_edge (src2), src2 = e2->src;
163490075Sobrien
163590075Sobrien  /* Nothing to do if we reach ENTRY, or a common source block.  */
163690075Sobrien  if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
163790075Sobrien    return false;
163890075Sobrien  if (src1 == src2)
163990075Sobrien    return false;
164090075Sobrien
164190075Sobrien  /* Seeing more than 1 forwarder blocks would confuse us later...  */
164290075Sobrien  if (FORWARDER_BLOCK_P (e1->dest)
1643169689Skan      && FORWARDER_BLOCK_P (single_succ (e1->dest)))
164490075Sobrien    return false;
164590075Sobrien
164690075Sobrien  if (FORWARDER_BLOCK_P (e2->dest)
1647169689Skan      && FORWARDER_BLOCK_P (single_succ (e2->dest)))
164890075Sobrien    return false;
164990075Sobrien
165090075Sobrien  /* Likewise with dead code (possibly newly created by the other optimizations
165190075Sobrien     of cfg_cleanup).  */
1652169689Skan  if (EDGE_COUNT (src1->preds) == 0 || EDGE_COUNT (src2->preds) == 0)
165390075Sobrien    return false;
165490075Sobrien
165590075Sobrien  /* Look for the common insn sequence, part the first ...  */
165690075Sobrien  if (!outgoing_edges_match (mode, src1, src2))
165790075Sobrien    return false;
165890075Sobrien
165990075Sobrien  /* ... and part the second.  */
166090075Sobrien  nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);
1661169689Skan
1662169689Skan  /* Don't proceed with the crossjump unless we found a sufficient number
1663169689Skan     of matching instructions or the 'from' block was totally matched
1664169689Skan     (such that its predecessors will hopefully be redirected and the
1665169689Skan     block removed).  */
1666169689Skan  if ((nmatch < PARAM_VALUE (PARAM_MIN_CROSSJUMP_INSNS))
1667169689Skan      && (newpos1 != BB_HEAD (src1)))
166890075Sobrien    return false;
166990075Sobrien
1670169689Skan  /* Avoid deleting preserve label when redirecting ABNORMAL edges.  */
1671169689Skan  if (block_has_preserve_label (e1->dest)
1672169689Skan      && (e1->flags & EDGE_ABNORMAL))
1673169689Skan    return false;
1674169689Skan
1675132718Skan  /* Here we know that the insns in the end of SRC1 which are common with SRC2
1676132718Skan     will be deleted.
1677132718Skan     If we have tablejumps in the end of SRC1 and SRC2
1678132718Skan     they have been already compared for equivalence in outgoing_edges_match ()
1679132718Skan     so replace the references to TABLE1 by references to TABLE2.  */
1680132718Skan    {
1681132718Skan      rtx label1, label2;
1682132718Skan      rtx table1, table2;
1683132718Skan
1684132718Skan      if (tablejump_p (BB_END (src1), &label1, &table1)
1685132718Skan	  && tablejump_p (BB_END (src2), &label2, &table2)
1686132718Skan	  && label1 != label2)
1687132718Skan	{
1688132718Skan	  replace_label_data rr;
1689132718Skan	  rtx insn;
1690132718Skan
1691132718Skan	  /* Replace references to LABEL1 with LABEL2.  */
1692132718Skan	  rr.r1 = label1;
1693132718Skan	  rr.r2 = label2;
1694132718Skan	  rr.update_label_nuses = true;
1695132718Skan	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1696132718Skan	    {
1697132718Skan	      /* Do not replace the label in SRC1->END because when deleting
1698132718Skan		 a block whose end is a tablejump, the tablejump referenced
1699132718Skan		 from the instruction is deleted too.  */
1700132718Skan	      if (insn != BB_END (src1))
1701132718Skan		for_each_rtx (&insn, replace_label, &rr);
1702132718Skan	    }
1703132718Skan	}
1704132718Skan    }
1705132718Skan
1706169689Skan  /* Avoid splitting if possible.  We must always split when SRC2 has
1707169689Skan     EH predecessor edges, or we may end up with basic blocks with both
1708169689Skan     normal and EH predecessor edges.  */
1709169689Skan  if (newpos2 == BB_HEAD (src2)
1710169689Skan      && !(EDGE_PRED (src2, 0)->flags & EDGE_EH))
171190075Sobrien    redirect_to = src2;
171290075Sobrien  else
171390075Sobrien    {
1714169689Skan      if (newpos2 == BB_HEAD (src2))
1715169689Skan	{
1716169689Skan	  /* Skip possible basic block header.  */
1717169689Skan	  if (LABEL_P (newpos2))
1718169689Skan	    newpos2 = NEXT_INSN (newpos2);
1719169689Skan	  if (NOTE_P (newpos2))
1720169689Skan	    newpos2 = NEXT_INSN (newpos2);
1721169689Skan	}
1722169689Skan
1723169689Skan      if (dump_file)
1724169689Skan	fprintf (dump_file, "Splitting bb %i before %i insns\n",
172590075Sobrien		 src2->index, nmatch);
172690075Sobrien      redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;
172790075Sobrien    }
172890075Sobrien
1729169689Skan  if (dump_file)
1730169689Skan    fprintf (dump_file,
173190075Sobrien	     "Cross jumping from bb %i to bb %i; %i common insns\n",
173290075Sobrien	     src1->index, src2->index, nmatch);
173390075Sobrien
173490075Sobrien  redirect_to->count += src1->count;
173590075Sobrien  redirect_to->frequency += src1->frequency;
1736169689Skan  /* We may have some registers visible through the block.  */
1737117395Skan  redirect_to->flags |= BB_DIRTY;
173890075Sobrien
173990075Sobrien  /* Recompute the frequencies and counts of outgoing edges.  */
1740169689Skan  FOR_EACH_EDGE (s, ei, redirect_to->succs)
174190075Sobrien    {
174290075Sobrien      edge s2;
1743169689Skan      edge_iterator ei;
174490075Sobrien      basic_block d = s->dest;
174590075Sobrien
174690075Sobrien      if (FORWARDER_BLOCK_P (d))
1747169689Skan	d = single_succ (d);
174890075Sobrien
1749169689Skan      FOR_EACH_EDGE (s2, ei, src1->succs)
175090075Sobrien	{
175190075Sobrien	  basic_block d2 = s2->dest;
175290075Sobrien	  if (FORWARDER_BLOCK_P (d2))
1753169689Skan	    d2 = single_succ (d2);
175490075Sobrien	  if (d == d2)
175590075Sobrien	    break;
175690075Sobrien	}
175790075Sobrien
175890075Sobrien      s->count += s2->count;
175990075Sobrien
176090075Sobrien      /* Take care to update possible forwarder blocks.  We verified
1761169689Skan	 that there is no more than one in the chain, so we can't run
1762169689Skan	 into infinite loop.  */
176390075Sobrien      if (FORWARDER_BLOCK_P (s->dest))
176490075Sobrien	{
1765169689Skan	  single_succ_edge (s->dest)->count += s2->count;
176690075Sobrien	  s->dest->count += s2->count;
176790075Sobrien	  s->dest->frequency += EDGE_FREQUENCY (s);
176890075Sobrien	}
176990075Sobrien
177090075Sobrien      if (FORWARDER_BLOCK_P (s2->dest))
177190075Sobrien	{
1772169689Skan	  single_succ_edge (s2->dest)->count -= s2->count;
1773169689Skan	  if (single_succ_edge (s2->dest)->count < 0)
1774169689Skan	    single_succ_edge (s2->dest)->count = 0;
177590075Sobrien	  s2->dest->count -= s2->count;
177690075Sobrien	  s2->dest->frequency -= EDGE_FREQUENCY (s);
177790075Sobrien	  if (s2->dest->frequency < 0)
177890075Sobrien	    s2->dest->frequency = 0;
177990075Sobrien	  if (s2->dest->count < 0)
178090075Sobrien	    s2->dest->count = 0;
178190075Sobrien	}
178290075Sobrien
178390075Sobrien      if (!redirect_to->frequency && !src1->frequency)
178490075Sobrien	s->probability = (s->probability + s2->probability) / 2;
178590075Sobrien      else
178690075Sobrien	s->probability
178790075Sobrien	  = ((s->probability * redirect_to->frequency +
178890075Sobrien	      s2->probability * src1->frequency)
178990075Sobrien	     / (redirect_to->frequency + src1->frequency));
179090075Sobrien    }
179190075Sobrien
179290075Sobrien  update_br_prob_note (redirect_to);
179390075Sobrien
179490075Sobrien  /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1.  */
179590075Sobrien
179690075Sobrien  /* Skip possible basic block header.  */
1797169689Skan  if (LABEL_P (newpos1))
179890075Sobrien    newpos1 = NEXT_INSN (newpos1);
179990075Sobrien
1800169689Skan  if (NOTE_P (newpos1))
180190075Sobrien    newpos1 = NEXT_INSN (newpos1);
180290075Sobrien
1803117395Skan  redirect_from = split_block (src1, PREV_INSN (newpos1))->src;
1804169689Skan  to_remove = single_succ (redirect_from);
180590075Sobrien
1806169689Skan  redirect_edge_and_branch_force (single_succ_edge (redirect_from), redirect_to);
1807169689Skan  delete_basic_block (to_remove);
180890075Sobrien
1809117395Skan  update_forwarder_flag (redirect_from);
1810169689Skan  if (redirect_to != src2)
1811169689Skan    update_forwarder_flag (src2);
181290075Sobrien
181390075Sobrien  return true;
181490075Sobrien}
181590075Sobrien
181690075Sobrien/* Search the predecessors of BB for common insn sequences.  When found,
181790075Sobrien   share code between them by redirecting control flow.  Return true if
181890075Sobrien   any changes made.  */
181990075Sobrien
182090075Sobrienstatic bool
1821132718Skantry_crossjump_bb (int mode, basic_block bb)
182290075Sobrien{
1823169689Skan  edge e, e2, fallthru;
182490075Sobrien  bool changed;
1825169689Skan  unsigned max, ix, ix2;
1826169689Skan  basic_block ev, ev2;
1827169689Skan  edge_iterator ei;
182890075Sobrien
182990075Sobrien  /* Nothing to do if there is not at least two incoming edges.  */
1830169689Skan  if (EDGE_COUNT (bb->preds) < 2)
183190075Sobrien    return false;
183290075Sobrien
1833169689Skan  /* Don't crossjump if this block ends in a computed jump,
1834169689Skan     unless we are optimizing for size.  */
1835169689Skan  if (!optimize_size
1836169689Skan      && bb != EXIT_BLOCK_PTR
1837169689Skan      && computed_jump_p (BB_END (bb)))
1838169689Skan    return false;
1839169689Skan
1840169689Skan  /* If we are partitioning hot/cold basic blocks, we don't want to
1841169689Skan     mess up unconditional or indirect jumps that cross between hot
1842169689Skan     and cold sections.
1843169689Skan
1844169689Skan     Basic block partitioning may result in some jumps that appear to
1845169689Skan     be optimizable (or blocks that appear to be mergeable), but which really
1846169689Skan     must be left untouched (they are required to make it safely across
1847169689Skan     partition boundaries).  See the comments at the top of
1848169689Skan     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
1849169689Skan
1850169689Skan  if (BB_PARTITION (EDGE_PRED (bb, 0)->src) !=
1851169689Skan					BB_PARTITION (EDGE_PRED (bb, 1)->src)
1852169689Skan      || (EDGE_PRED (bb, 0)->flags & EDGE_CROSSING))
1853169689Skan    return false;
1854169689Skan
185590075Sobrien  /* It is always cheapest to redirect a block that ends in a branch to
185690075Sobrien     a block that falls through into BB, as that adds no branches to the
185790075Sobrien     program.  We'll try that combination first.  */
1858117395Skan  fallthru = NULL;
1859117395Skan  max = PARAM_VALUE (PARAM_MAX_CROSSJUMP_EDGES);
1860169689Skan
1861169689Skan  if (EDGE_COUNT (bb->preds) > max)
1862169689Skan    return false;
1863169689Skan
1864169689Skan  FOR_EACH_EDGE (e, ei, bb->preds)
1865117395Skan    {
1866117395Skan      if (e->flags & EDGE_FALLTHRU)
1867117395Skan	fallthru = e;
1868117395Skan    }
186990075Sobrien
187090075Sobrien  changed = false;
1871169689Skan  for (ix = 0, ev = bb; ix < EDGE_COUNT (ev->preds); )
187290075Sobrien    {
1873169689Skan      e = EDGE_PRED (ev, ix);
1874169689Skan      ix++;
187590075Sobrien
187690075Sobrien      /* As noted above, first try with the fallthru predecessor.  */
187790075Sobrien      if (fallthru)
187890075Sobrien	{
187990075Sobrien	  /* Don't combine the fallthru edge into anything else.
188090075Sobrien	     If there is a match, we'll do it the other way around.  */
188190075Sobrien	  if (e == fallthru)
188290075Sobrien	    continue;
1883169689Skan	  /* If nothing changed since the last attempt, there is nothing
1884169689Skan	     we can do.  */
1885169689Skan	  if (!first_pass
1886169689Skan	      && (!(e->src->flags & BB_DIRTY)
1887169689Skan		  && !(fallthru->src->flags & BB_DIRTY)))
1888169689Skan	    continue;
188990075Sobrien
189090075Sobrien	  if (try_crossjump_to_edge (mode, e, fallthru))
189190075Sobrien	    {
189290075Sobrien	      changed = true;
1893169689Skan	      ix = 0;
1894169689Skan	      ev = bb;
189590075Sobrien	      continue;
189690075Sobrien	    }
189790075Sobrien	}
189890075Sobrien
189990075Sobrien      /* Non-obvious work limiting check: Recognize that we're going
190090075Sobrien	 to call try_crossjump_bb on every basic block.  So if we have
190190075Sobrien	 two blocks with lots of outgoing edges (a switch) and they
190290075Sobrien	 share lots of common destinations, then we would do the
190390075Sobrien	 cross-jump check once for each common destination.
190490075Sobrien
190590075Sobrien	 Now, if the blocks actually are cross-jump candidates, then
190690075Sobrien	 all of their destinations will be shared.  Which means that
190790075Sobrien	 we only need check them for cross-jump candidacy once.  We
190890075Sobrien	 can eliminate redundant checks of crossjump(A,B) by arbitrarily
190990075Sobrien	 choosing to do the check from the block for which the edge
191090075Sobrien	 in question is the first successor of A.  */
1911169689Skan      if (EDGE_SUCC (e->src, 0) != e)
191290075Sobrien	continue;
191390075Sobrien
1914169689Skan      for (ix2 = 0, ev2 = bb; ix2 < EDGE_COUNT (ev2->preds); )
191590075Sobrien	{
1916169689Skan	  e2 = EDGE_PRED (ev2, ix2);
1917169689Skan	  ix2++;
191890075Sobrien
191990075Sobrien	  if (e2 == e)
192090075Sobrien	    continue;
192190075Sobrien
192290075Sobrien	  /* We've already checked the fallthru edge above.  */
192390075Sobrien	  if (e2 == fallthru)
192490075Sobrien	    continue;
192590075Sobrien
192690075Sobrien	  /* The "first successor" check above only prevents multiple
192790075Sobrien	     checks of crossjump(A,B).  In order to prevent redundant
192890075Sobrien	     checks of crossjump(B,A), require that A be the block
192990075Sobrien	     with the lowest index.  */
193090075Sobrien	  if (e->src->index > e2->src->index)
193190075Sobrien	    continue;
193290075Sobrien
1933169689Skan	  /* If nothing changed since the last attempt, there is nothing
1934169689Skan	     we can do.  */
1935169689Skan	  if (!first_pass
1936169689Skan	      && (!(e->src->flags & BB_DIRTY)
1937169689Skan		  && !(e2->src->flags & BB_DIRTY)))
1938169689Skan	    continue;
1939169689Skan
194090075Sobrien	  if (try_crossjump_to_edge (mode, e, e2))
194190075Sobrien	    {
194290075Sobrien	      changed = true;
1943169689Skan	      ev2 = bb;
1944169689Skan	      ix = 0;
194590075Sobrien	      break;
194690075Sobrien	    }
194790075Sobrien	}
194890075Sobrien    }
194990075Sobrien
195090075Sobrien  return changed;
195190075Sobrien}
195290075Sobrien
195390075Sobrien/* Do simple CFG optimizations - basic block merging, simplifying of jump
195490075Sobrien   instructions etc.  Return nonzero if changes were made.  */
195590075Sobrien
195690075Sobrienstatic bool
1957132718Skantry_optimize_cfg (int mode)
195890075Sobrien{
195990075Sobrien  bool changed_overall = false;
196090075Sobrien  bool changed;
196190075Sobrien  int iterations = 0;
1962132718Skan  basic_block bb, b, next;
196390075Sobrien
196490075Sobrien  if (mode & CLEANUP_CROSSJUMP)
196590075Sobrien    add_noreturn_fake_exit_edges ();
196690075Sobrien
1967169689Skan  if (mode & (CLEANUP_UPDATE_LIFE | CLEANUP_CROSSJUMP | CLEANUP_THREADING))
1968169689Skan    clear_bb_flags ();
1969169689Skan
1970117395Skan  FOR_EACH_BB (bb)
1971117395Skan    update_forwarder_flag (bb);
197290075Sobrien
1973169689Skan  if (! targetm.cannot_modify_jumps_p ())
197490075Sobrien    {
1975169689Skan      first_pass = true;
197696263Sobrien      /* Attempt to merge blocks as made possible by edge removal.  If
197796263Sobrien	 a block has only one successor, and the successor has only
197896263Sobrien	 one predecessor, they may be combined.  */
197996263Sobrien      do
198096263Sobrien	{
198196263Sobrien	  changed = false;
198296263Sobrien	  iterations++;
198390075Sobrien
1984169689Skan	  if (dump_file)
1985169689Skan	    fprintf (dump_file,
198696263Sobrien		     "\n\ntry_optimize_cfg iteration %i\n\n",
198796263Sobrien		     iterations);
198890075Sobrien
1989117395Skan	  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR;)
199090075Sobrien	    {
1991117395Skan	      basic_block c;
199296263Sobrien	      edge s;
199396263Sobrien	      bool changed_here = false;
199490075Sobrien
199596263Sobrien	      /* Delete trivially dead basic blocks.  */
1996169689Skan	      while (EDGE_COUNT (b->preds) == 0)
199796263Sobrien		{
1998117395Skan		  c = b->prev_bb;
1999169689Skan		  if (dump_file)
2000169689Skan		    fprintf (dump_file, "Deleting block %i.\n",
200196263Sobrien			     b->index);
200290075Sobrien
2003169689Skan		  delete_basic_block (b);
2004132718Skan		  if (!(mode & CLEANUP_CFGLAYOUT))
2005132718Skan		    changed = true;
200696263Sobrien		  b = c;
200796263Sobrien		}
200890075Sobrien
2009169689Skan	      /* Remove code labels no longer used.  */
2010169689Skan	      if (single_pred_p (b)
2011169689Skan		  && (single_pred_edge (b)->flags & EDGE_FALLTHRU)
2012169689Skan		  && !(single_pred_edge (b)->flags & EDGE_COMPLEX)
2013169689Skan		  && LABEL_P (BB_HEAD (b))
201496263Sobrien		  /* If the previous block ends with a branch to this
201596263Sobrien		     block, we can't delete the label.  Normally this
201696263Sobrien		     is a condjump that is yet to be simplified, but
201796263Sobrien		     if CASE_DROPS_THRU, this can be a tablejump with
201896263Sobrien		     some element going to the same place as the
201996263Sobrien		     default (fallthru).  */
2020169689Skan		  && (single_pred (b) == ENTRY_BLOCK_PTR
2021169689Skan		      || !JUMP_P (BB_END (single_pred (b)))
2022132718Skan		      || ! label_is_jump_target_p (BB_HEAD (b),
2023169689Skan						   BB_END (single_pred (b)))))
202496263Sobrien		{
2025132718Skan		  rtx label = BB_HEAD (b);
202690075Sobrien
202796263Sobrien		  delete_insn_chain (label, label);
2028132718Skan		  /* In the case label is undeletable, move it after the
2029132718Skan		     BASIC_BLOCK note.  */
2030132718Skan		  if (NOTE_LINE_NUMBER (BB_HEAD (b)) == NOTE_INSN_DELETED_LABEL)
2031132718Skan		    {
2032132718Skan		      rtx bb_note = NEXT_INSN (BB_HEAD (b));
2033132718Skan
2034132718Skan		      reorder_insns_nobb (label, label, bb_note);
2035132718Skan		      BB_HEAD (b) = bb_note;
2036132718Skan		    }
2037169689Skan		  if (dump_file)
2038169689Skan		    fprintf (dump_file, "Deleted label in block %i.\n",
203996263Sobrien			     b->index);
204096263Sobrien		}
204190075Sobrien
204296263Sobrien	      /* If we fall through an empty block, we can remove it.  */
2043132718Skan	      if (!(mode & CLEANUP_CFGLAYOUT)
2044169689Skan		  && single_pred_p (b)
2045169689Skan		  && (single_pred_edge (b)->flags & EDGE_FALLTHRU)
2046169689Skan		  && !LABEL_P (BB_HEAD (b))
204796263Sobrien		  && FORWARDER_BLOCK_P (b)
204896263Sobrien		  /* Note that forwarder_block_p true ensures that
204996263Sobrien		     there is a successor for this block.  */
2050169689Skan		  && (single_succ_edge (b)->flags & EDGE_FALLTHRU)
2051169689Skan		  && n_basic_blocks > NUM_FIXED_BLOCKS + 1)
205296263Sobrien		{
2053169689Skan		  if (dump_file)
2054169689Skan		    fprintf (dump_file,
205596263Sobrien			     "Deleting fallthru block %i.\n",
205696263Sobrien			     b->index);
205790075Sobrien
2058117395Skan		  c = b->prev_bb == ENTRY_BLOCK_PTR ? b->next_bb : b->prev_bb;
2059169689Skan		  redirect_edge_succ_nodup (single_pred_edge (b),
2060169689Skan					    single_succ (b));
2061169689Skan		  delete_basic_block (b);
206296263Sobrien		  changed = true;
206396263Sobrien		  b = c;
206496263Sobrien		}
206590075Sobrien
2066169689Skan	      if (single_succ_p (b)
2067169689Skan		  && (s = single_succ_edge (b))
2068132718Skan		  && !(s->flags & EDGE_COMPLEX)
2069132718Skan		  && (c = s->dest) != EXIT_BLOCK_PTR
2070169689Skan		  && single_pred_p (c)
2071132718Skan		  && b != c)
2072132718Skan		{
2073132718Skan		  /* When not in cfg_layout mode use code aware of reordering
2074132718Skan		     INSN.  This code possibly creates new basic blocks so it
2075132718Skan		     does not fit merge_blocks interface and is kept here in
2076132718Skan		     hope that it will become useless once more of compiler
2077132718Skan		     is transformed to use cfg_layout mode.  */
2078169689Skan
2079132718Skan		  if ((mode & CLEANUP_CFGLAYOUT)
2080132718Skan		      && can_merge_blocks_p (b, c))
2081132718Skan		    {
2082132718Skan		      merge_blocks (b, c);
2083132718Skan		      update_forwarder_flag (b);
2084132718Skan		      changed_here = true;
2085132718Skan		    }
2086132718Skan		  else if (!(mode & CLEANUP_CFGLAYOUT)
2087132718Skan			   /* If the jump insn has side effects,
2088132718Skan			      we can't kill the edge.  */
2089169689Skan			   && (!JUMP_P (BB_END (b))
2090132718Skan			       || (reload_completed
2091132718Skan				   ? simplejump_p (BB_END (b))
2092169689Skan				   : (onlyjump_p (BB_END (b))
2093169689Skan				      && !tablejump_p (BB_END (b),
2094169689Skan						       NULL, NULL))))
2095132718Skan			   && (next = merge_blocks_move (s, b, c, mode)))
2096132718Skan		      {
2097132718Skan			b = next;
2098132718Skan			changed_here = true;
2099132718Skan		      }
2100132718Skan		}
210190075Sobrien
210296263Sobrien	      /* Simplify branch over branch.  */
2103132718Skan	      if ((mode & CLEANUP_EXPENSIVE)
2104132718Skan		   && !(mode & CLEANUP_CFGLAYOUT)
2105132718Skan		   && try_simplify_condjump (b))
2106117395Skan		changed_here = true;
210796263Sobrien
210896263Sobrien	      /* If B has a single outgoing edge, but uses a
210996263Sobrien		 non-trivial jump instruction without side-effects, we
211096263Sobrien		 can either delete the jump entirely, or replace it
2111132718Skan		 with a simple unconditional jump.  */
2112169689Skan	      if (single_succ_p (b)
2113169689Skan		  && single_succ (b) != EXIT_BLOCK_PTR
2114132718Skan		  && onlyjump_p (BB_END (b))
2115169689Skan		  && !find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
2116169689Skan		  && try_redirect_by_replacing_jump (single_succ_edge (b),
2117169689Skan						     single_succ (b),
2118132718Skan						     (mode & CLEANUP_CFGLAYOUT) != 0))
211996263Sobrien		{
212096263Sobrien		  update_forwarder_flag (b);
212196263Sobrien		  changed_here = true;
212296263Sobrien		}
212396263Sobrien
212496263Sobrien	      /* Simplify branch to branch.  */
212596263Sobrien	      if (try_forward_edges (mode, b))
212696263Sobrien		changed_here = true;
212796263Sobrien
212896263Sobrien	      /* Look for shared code between blocks.  */
212996263Sobrien	      if ((mode & CLEANUP_CROSSJUMP)
213096263Sobrien		  && try_crossjump_bb (mode, b))
213196263Sobrien		changed_here = true;
213296263Sobrien
213396263Sobrien	      /* Don't get confused by the index shift caused by
213496263Sobrien		 deleting blocks.  */
213596263Sobrien	      if (!changed_here)
2136117395Skan		b = b->next_bb;
213796263Sobrien	      else
213896263Sobrien		changed = true;
213990075Sobrien	    }
214090075Sobrien
214190075Sobrien	  if ((mode & CLEANUP_CROSSJUMP)
214296263Sobrien	      && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
214390075Sobrien	    changed = true;
214490075Sobrien
214590075Sobrien#ifdef ENABLE_CHECKING
214696263Sobrien	  if (changed)
214796263Sobrien	    verify_flow_info ();
214890075Sobrien#endif
214990075Sobrien
215096263Sobrien	  changed_overall |= changed;
2151169689Skan	  first_pass = false;
215296263Sobrien	}
215396263Sobrien      while (changed);
215490075Sobrien    }
215590075Sobrien
215690075Sobrien  if (mode & CLEANUP_CROSSJUMP)
2157169689Skan    remove_fake_exit_edges ();
215890075Sobrien
2159169689Skan  FOR_ALL_BB (b)
2160169689Skan    b->flags &= ~(BB_FORWARDER_BLOCK | BB_NONTHREADABLE_BLOCK);
216190075Sobrien
216290075Sobrien  return changed_overall;
216390075Sobrien}
216490075Sobrien
216590075Sobrien/* Delete all unreachable basic blocks.  */
216690075Sobrien
2167117395Skanbool
2168132718Skandelete_unreachable_blocks (void)
216990075Sobrien{
217090075Sobrien  bool changed = false;
2171117395Skan  basic_block b, next_bb;
217290075Sobrien
217390075Sobrien  find_unreachable_blocks ();
217490075Sobrien
2175117395Skan  /* Delete all unreachable basic blocks.  */
217690075Sobrien
2177117395Skan  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
217890075Sobrien    {
2179117395Skan      next_bb = b->next_bb;
218090075Sobrien
218190075Sobrien      if (!(b->flags & BB_REACHABLE))
218296263Sobrien	{
2183169689Skan	  delete_basic_block (b);
218496263Sobrien	  changed = true;
218596263Sobrien	}
218690075Sobrien    }
218790075Sobrien
218890075Sobrien  if (changed)
218990075Sobrien    tidy_fallthru_edges ();
219090075Sobrien  return changed;
219190075Sobrien}
2192169689Skan
2193169689Skan/* Merges sequential blocks if possible.  */
2194169689Skan
2195169689Skanbool
2196169689Skanmerge_seq_blocks (void)
2197169689Skan{
2198169689Skan  basic_block bb;
2199169689Skan  bool changed = false;
2200169689Skan
2201169689Skan  for (bb = ENTRY_BLOCK_PTR->next_bb; bb != EXIT_BLOCK_PTR; )
2202169689Skan    {
2203169689Skan      if (single_succ_p (bb)
2204169689Skan	  && can_merge_blocks_p (bb, single_succ (bb)))
2205169689Skan	{
2206169689Skan	  /* Merge the blocks and retry.  */
2207169689Skan	  merge_blocks (bb, single_succ (bb));
2208169689Skan	  changed = true;
2209169689Skan	  continue;
2210169689Skan	}
2211169689Skan
2212169689Skan      bb = bb->next_bb;
2213169689Skan    }
2214169689Skan
2215169689Skan  return changed;
2216169689Skan}
221790075Sobrien
221890075Sobrien/* Tidy the CFG by deleting unreachable code and whatnot.  */
221990075Sobrien
222090075Sobrienbool
2221132718Skancleanup_cfg (int mode)
222290075Sobrien{
222390075Sobrien  bool changed = false;
222490075Sobrien
222590075Sobrien  timevar_push (TV_CLEANUP_CFG);
2226117395Skan  if (delete_unreachable_blocks ())
2227117395Skan    {
2228117395Skan      changed = true;
2229117395Skan      /* We've possibly created trivially dead code.  Cleanup it right
2230132718Skan	 now to introduce more opportunities for try_optimize_cfg.  */
2231169689Skan      if (!(mode & (CLEANUP_NO_INSN_DEL | CLEANUP_UPDATE_LIFE))
2232117395Skan	  && !reload_completed)
2233117395Skan	delete_trivially_dead_insns (get_insns(), max_reg_num ());
2234117395Skan    }
223590075Sobrien
2236117395Skan  compact_blocks ();
2237117395Skan
2238117395Skan  while (try_optimize_cfg (mode))
2239117395Skan    {
2240117395Skan      delete_unreachable_blocks (), changed = true;
2241117395Skan      if (mode & CLEANUP_UPDATE_LIFE)
2242117395Skan	{
2243132718Skan	  /* Cleaning up CFG introduces more opportunities for dead code
2244132718Skan	     removal that in turn may introduce more opportunities for
2245117395Skan	     cleaning up the CFG.  */
2246117395Skan	  if (!update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
2247117395Skan						 PROP_DEATH_NOTES
2248117395Skan						 | PROP_SCAN_DEAD_CODE
2249117395Skan						 | PROP_KILL_DEAD_CODE
2250169689Skan						 | ((mode & CLEANUP_LOG_LINKS)
2251132718Skan						    ? PROP_LOG_LINKS : 0)))
2252117395Skan	    break;
2253117395Skan	}
2254169689Skan      else if (!(mode & CLEANUP_NO_INSN_DEL)
2255117395Skan	       && (mode & CLEANUP_EXPENSIVE)
2256117395Skan	       && !reload_completed)
2257117395Skan	{
2258117395Skan	  if (!delete_trivially_dead_insns (get_insns(), max_reg_num ()))
2259117395Skan	    break;
2260117395Skan	}
2261117395Skan      else
2262117395Skan	break;
2263117395Skan      delete_dead_jumptables ();
2264117395Skan    }
2265117395Skan
226690075Sobrien  timevar_pop (TV_CLEANUP_CFG);
226790075Sobrien
226890075Sobrien  return changed;
226990075Sobrien}
2270169689Skan
2271169689Skanstatic unsigned int
2272169689Skanrest_of_handle_jump (void)
2273169689Skan{
2274169689Skan  delete_unreachable_blocks ();
2275169689Skan
2276169689Skan  if (cfun->tail_call_emit)
2277169689Skan    fixup_tail_calls ();
2278169689Skan  return 0;
2279169689Skan}
2280169689Skan
2281169689Skanstruct tree_opt_pass pass_jump =
2282169689Skan{
2283169689Skan  "sibling",                            /* name */
2284169689Skan  NULL,                                 /* gate */
2285169689Skan  rest_of_handle_jump,			/* execute */
2286169689Skan  NULL,                                 /* sub */
2287169689Skan  NULL,                                 /* next */
2288169689Skan  0,                                    /* static_pass_number */
2289169689Skan  TV_JUMP,                              /* tv_id */
2290169689Skan  0,                                    /* properties_required */
2291169689Skan  0,                                    /* properties_provided */
2292169689Skan  0,                                    /* properties_destroyed */
2293169689Skan  TODO_ggc_collect,                     /* todo_flags_start */
2294169689Skan  TODO_dump_func |
2295169689Skan  TODO_verify_flow,                     /* todo_flags_finish */
2296169689Skan  'i'                                   /* letter */
2297169689Skan};
2298169689Skan
2299169689Skan
2300169689Skanstatic unsigned int
2301169689Skanrest_of_handle_jump2 (void)
2302169689Skan{
2303169689Skan  /* Turn NOTE_INSN_EXPECTED_VALUE into REG_BR_PROB.  Do this
2304169689Skan     before jump optimization switches branch directions.  */
2305169689Skan  if (flag_guess_branch_prob)
2306169689Skan    expected_value_to_br_prob ();
2307169689Skan
2308169689Skan  delete_trivially_dead_insns (get_insns (), max_reg_num ());
2309169689Skan  reg_scan (get_insns (), max_reg_num ());
2310169689Skan  if (dump_file)
2311169689Skan    dump_flow_info (dump_file, dump_flags);
2312169689Skan  cleanup_cfg ((optimize ? CLEANUP_EXPENSIVE : 0)
2313169689Skan	       | (flag_thread_jumps ? CLEANUP_THREADING : 0));
2314169689Skan
2315169689Skan  purge_line_number_notes ();
2316169689Skan
2317169689Skan  if (optimize)
2318169689Skan    cleanup_cfg (CLEANUP_EXPENSIVE);
2319169689Skan
2320169689Skan  /* Jump optimization, and the removal of NULL pointer checks, may
2321169689Skan     have reduced the number of instructions substantially.  CSE, and
2322169689Skan     future passes, allocate arrays whose dimensions involve the
2323169689Skan     maximum instruction UID, so if we can reduce the maximum UID
2324169689Skan     we'll save big on memory.  */
2325169689Skan  renumber_insns ();
2326169689Skan  return 0;
2327169689Skan}
2328169689Skan
2329169689Skan
2330169689Skanstruct tree_opt_pass pass_jump2 =
2331169689Skan{
2332169689Skan  "jump",                               /* name */
2333169689Skan  NULL,                                 /* gate */
2334169689Skan  rest_of_handle_jump2,			/* execute */
2335169689Skan  NULL,                                 /* sub */
2336169689Skan  NULL,                                 /* next */
2337169689Skan  0,                                    /* static_pass_number */
2338169689Skan  TV_JUMP,                              /* tv_id */
2339169689Skan  0,                                    /* properties_required */
2340169689Skan  0,                                    /* properties_provided */
2341169689Skan  0,                                    /* properties_destroyed */
2342169689Skan  TODO_ggc_collect,                     /* todo_flags_start */
2343169689Skan  TODO_dump_func,                       /* todo_flags_finish */
2344169689Skan  'j'                                   /* letter */
2345169689Skan};
2346169689Skan
2347169689Skan
2348