/* CPU mode switching
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "tm_p.h"
#include "function.h"
#include "tree-pass.h"
#include "timevar.h"

/* We want target macros for the mode switching code to be able to refer
   to instruction attribute values.  */
#include "insn-attr.h"

#ifdef OPTIMIZE_MODE_SWITCHING

/* The algorithm for setting the modes consists of scanning the insn list
   and finding all the insns which require a specific mode.  Each insn gets
   a unique struct seginfo element.  These structures are inserted into a list
   for each basic block.  For each entity, there is an array of bb_info over
   the flow graph basic blocks (local var 'bb_info'); each element holds a
   list of all insns within that basic block, in the order they are
   encountered.

   For each entity, any basic block WITHOUT any insns requiring a specific
   mode is given a single entry without a mode.  (Each basic block
   in the flow graph must have at least one entry in the segment table.)

   The LCM algorithm is then run over the flow graph to determine where to
   place the sets to the highest-priority mode with respect to the first
   insn in any one block.  Any adjustments required to the transparency
   vectors are made, then the next iteration starts for the next-lower
   priority mode, until for each entity all modes are exhausted.

   More details can be found in the code of optimize_mode_switching ().  */
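
/* Purely as an illustrative sketch (not taken from any real port), a target
   with a single entity that switches between two modes might provide the
   hooks this pass consumes roughly as follows; the sketch_* names are
   hypothetical:

     #define OPTIMIZE_MODE_SWITCHING(ENTITY) 1
     #define NUM_MODES_FOR_MODE_SWITCHING { 2 }
     #define MODE_NEEDED(ENTITY, INSN) sketch_mode_needed (INSN)
     #define MODE_PRIORITY_TO_MODE(ENTITY, N) (N)
     #define MODE_ENTRY(ENTITY) 0
     #define MODE_EXIT(ENTITY) 0
     #define EMIT_MODE_SET(ENTITY, MODE, HARD_REGS_LIVE) \
       sketch_emit_mode_set (MODE)

   In such a scheme a MODE_NEEDED value equal to the number of modes (here 2)
   means "no particular mode required"; that is how the code below uses
   num_modes[e] as its no_mode marker.  */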

/* This structure contains the information for each insn which requires
   either single or double mode to be set.
   MODE is the mode this insn must be executed in.
   INSN_PTR is the insn to be executed (may be the note that marks the
   beginning of a basic block).
   BBNUM is the flow graph basic block this insn occurs in.
   NEXT is the next insn in the same basic block.
   REGS_LIVE are the hard registers live at this point; they are passed
   to EMIT_MODE_SET when the mode switch is emitted.  */
struct seginfo
{
  int mode;
  rtx insn_ptr;
  int bbnum;
  struct seginfo *next;
  HARD_REG_SET regs_live;
};

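/* Per-basic-block information for one entity: the head of the block's
   seginfo list and the mode the block ends up computing.  */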
struct bb_info
{
  struct seginfo *seginfo;
  int computing;
};

/* These bitmaps are used for the LCM algorithm: ANTIC marks the blocks in
   which a mode is anticipatable, TRANSP the blocks that are transparent
   for it, and COMP the blocks that compute it.  */

static sbitmap *antic;
static sbitmap *transp;
static sbitmap *comp;

static struct seginfo * new_seginfo (int, rtx, int, HARD_REG_SET);
static void add_seginfo (struct bb_info *, struct seginfo *);
static void reg_dies (rtx, HARD_REG_SET);
static void reg_becomes_live (rtx, rtx, void *);
static void make_preds_opaque (basic_block, int);


/* Allocate a new seginfo structure, initialized with MODE, INSN,
   basic block BB, and the hard register set REGS_LIVE.  */

static struct seginfo *
new_seginfo (int mode, rtx insn, int bb, HARD_REG_SET regs_live)
{
  struct seginfo *ptr;
  ptr = XNEW (struct seginfo);
  ptr->mode = mode;
  ptr->insn_ptr = insn;
  ptr->bbnum = bb;
  ptr->next = NULL;
  COPY_HARD_REG_SET (ptr->regs_live, regs_live);
  return ptr;
}

/* Add a seginfo element to the end of a list.
   HEAD is a pointer to the list beginning.
   INFO is the structure to be linked in.  */

static void
add_seginfo (struct bb_info *head, struct seginfo *info)
{
  struct seginfo *ptr;

  if (head->seginfo == NULL)
    head->seginfo = info;
  else
    {
      ptr = head->seginfo;
      while (ptr->next != NULL)
	ptr = ptr->next;
      ptr->next = info;
    }
}

/* Make all predecessors of basic block B opaque, recursively, until we hit
   some that are already non-transparent, or an edge where aux is set; that
   denotes that a mode set is to be done on that edge.
   J is the bit number in the bitmaps that corresponds to the entity that
   we are currently handling mode-switching for.  */

static void
make_preds_opaque (basic_block b, int j)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, b->preds)
    {
      basic_block pb = e->src;

      if (e->aux || ! TEST_BIT (transp[pb->index], j))
	continue;

      RESET_BIT (transp[pb->index], j);
      make_preds_opaque (pb, j);
    }
}

/* Record in LIVE that register REG died.  */

static void
reg_dies (rtx reg, HARD_REG_SET live)
{
  int regno, nregs;

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      CLEAR_HARD_REG_BIT (live, regno + nregs);
}

/* Record in LIVE that register REG became live.
   This is called via note_stores.  */

static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *live)
{
  int regno, nregs;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    for (nregs = hard_regno_nregs[regno][GET_MODE (reg)] - 1; nregs >= 0;
	 nregs--)
      SET_HARD_REG_BIT (* (HARD_REG_SET *) live, regno + nregs);
}

/* Make sure that if MODE_ENTRY is defined, then MODE_EXIT is defined too,
   and vice versa.  */
#if defined (MODE_ENTRY) != defined (MODE_EXIT)
 #error "Both MODE_ENTRY and MODE_EXIT must be defined"
#endif

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
/* Split the fallthrough edge to the exit block, so that we can note
   that NORMAL_MODE is required there.  Return the new block if it's
   inserted before the exit block.  Otherwise return null.  */

static basic_block
create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
{
  edge eg;
  edge_iterator ei;
  basic_block pre_exit;

  /* The only non-call predecessor at this stage is a block with a
     fallthrough edge; there can be at most one, but there could be
     none at all, e.g. when exit is called.  */
  pre_exit = 0;
  FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
    if (eg->flags & EDGE_FALLTHRU)
      {
	basic_block src_bb = eg->src;
	regset live_at_end = src_bb->il.rtl->global_live_at_end;
	rtx last_insn, ret_reg;

	gcc_assert (!pre_exit);
	/* If this function returns a value at the end, we have to
	   insert the final mode switch before the return value copy
	   to its hard register.  */
	if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
	    && NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
	    && GET_CODE (PATTERN (last_insn)) == USE
	    && GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
	  {
	    int ret_start = REGNO (ret_reg);
	    int nregs = hard_regno_nregs[ret_start][GET_MODE (ret_reg)];
	    int ret_end = ret_start + nregs;
	    int short_block = 0;
	    int maybe_builtin_apply = 0;
	    int forced_late_switch = 0;
	    rtx before_return_copy;

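	    /* Scan backwards from the final USE of the return register,
	       accounting for the copies that set its hard registers, until
	       all of NREGS is accounted for or we find a reason to stop.  */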
	    do
	      {
		rtx return_copy = PREV_INSN (last_insn);
		rtx return_copy_pat, copy_reg;
		int copy_start, copy_num;
		int j;

		if (INSN_P (return_copy))
		  {
		    if (GET_CODE (PATTERN (return_copy)) == USE
			&& GET_CODE (XEXP (PATTERN (return_copy), 0)) == REG
			&& (FUNCTION_VALUE_REGNO_P
			    (REGNO (XEXP (PATTERN (return_copy), 0)))))
		      {
			maybe_builtin_apply = 1;
			last_insn = return_copy;
			continue;
		      }
		    /* If the return register is not (in its entirety)
		       likely spilled, the return copy might be
		       partially or completely optimized away.  */
		    return_copy_pat = single_set (return_copy);
		    if (!return_copy_pat)
		      {
			return_copy_pat = PATTERN (return_copy);
			if (GET_CODE (return_copy_pat) != CLOBBER)
			  break;
		      }
		    copy_reg = SET_DEST (return_copy_pat);
		    if (GET_CODE (copy_reg) == REG)
		      copy_start = REGNO (copy_reg);
		    else if (GET_CODE (copy_reg) == SUBREG
			     && GET_CODE (SUBREG_REG (copy_reg)) == REG)
		      copy_start = REGNO (SUBREG_REG (copy_reg));
		    else
		      break;
		    if (copy_start >= FIRST_PSEUDO_REGISTER)
		      break;
		    copy_num
		      = hard_regno_nregs[copy_start][GET_MODE (copy_reg)];

		    /* If the return register is not likely spilled - as is
		       the case for floating point on SH4 - then it might
		       be set by an arithmetic operation that needs a
		       different mode than the exit block.  */
		    for (j = n_entities - 1; j >= 0; j--)
		      {
			int e = entity_map[j];
			int mode = MODE_NEEDED (e, return_copy);

			if (mode != num_modes[e] && mode != MODE_EXIT (e))
			  break;
		      }
		    if (j >= 0)
		      {
			/* For the SH4, floating point loads depend on fpscr,
			   thus we might need to put the final mode switch
			   after the return value copy.  That is still OK,
			   because a floating point return value does not
			   conflict with address reloads.  */
			if (copy_start >= ret_start
			    && copy_start + copy_num <= ret_end
			    && OBJECT_P (SET_SRC (return_copy_pat)))
			  forced_late_switch = 1;
			break;
		      }

		    if (copy_start >= ret_start
			&& copy_start + copy_num <= ret_end)
		      nregs -= copy_num;
		    else if (!maybe_builtin_apply
			     || !FUNCTION_VALUE_REGNO_P (copy_start))
		      break;
		    last_insn = return_copy;
		  }
		/* ??? Exception handling can lead to the return value
		   copy being already separated from the return value use,
		   as in unwind-dw2.c.
		   Similarly, conditionally returning without a value,
		   and conditionally using builtin_return can lead to an
		   isolated use.  */
		if (return_copy == BB_HEAD (src_bb))
		  {
		    short_block = 1;
		    break;
		  }
		last_insn = return_copy;
	      }
	    while (nregs);

	    /* If we didn't see a full return value copy, verify that there
	       is a plausible reason for this.  If some, but not all of the
	       return register is likely spilled, we can expect that there
	       is a copy for the likely spilled part.  */
	    gcc_assert (!nregs
			|| forced_late_switch
			|| short_block
			|| !(CLASS_LIKELY_SPILLED_P
			     (REGNO_REG_CLASS (ret_start)))
			|| (nregs
			    != hard_regno_nregs[ret_start][GET_MODE (ret_reg)])
			/* For multi-hard-register floating point
			   values, sometimes the likely-spilled part
			   is ordinarily copied first, then the other
			   part is set with an arithmetic operation.
			   This doesn't actually cause reload
			   failures, so let it pass.  */
			|| (GET_MODE_CLASS (GET_MODE (ret_reg)) != MODE_INT
			    && nregs != 1));

	    if (INSN_P (last_insn))
	      {
		before_return_copy
		  = emit_note_before (NOTE_INSN_DELETED, last_insn);
		/* Instructions preceding LAST_INSN in the same block might
		   require a different mode than MODE_EXIT, so if we might
		   have such instructions, keep them in a separate block
		   from pre_exit.  */
		if (last_insn != BB_HEAD (src_bb))
		  src_bb = split_block (src_bb,
					PREV_INSN (before_return_copy))->dest;
	      }
	    else
	      before_return_copy = last_insn;
	    pre_exit = split_block (src_bb, before_return_copy)->src;
	  }
	else
	  {
	    pre_exit = split_edge (eg);
	    COPY_REG_SET (pre_exit->il.rtl->global_live_at_start, live_at_end);
	    COPY_REG_SET (pre_exit->il.rtl->global_live_at_end, live_at_end);
	  }
      }

  return pre_exit;
}
#endif

/* Find all insns that need a particular mode setting, and insert the
   necessary mode switches.  Return true if we did work.  */

static int
optimize_mode_switching (void)
{
  rtx insn;
  int e;
  basic_block bb;
  int need_commit = 0;
  sbitmap *kill;
  struct edge_list *edge_list;
  static const int num_modes[] = NUM_MODES_FOR_MODE_SWITCHING;
#define N_ENTITIES ARRAY_SIZE (num_modes)
  int entity_map[N_ENTITIES];
  struct bb_info *bb_info[N_ENTITIES];
  int i, j;
  int n_entities;
  int max_num_modes = 0;
  bool emitted = false;
  basic_block post_entry ATTRIBUTE_UNUSED, pre_exit ATTRIBUTE_UNUSED;

  clear_bb_flags ();

  for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
    if (OPTIMIZE_MODE_SWITCHING (e))
      {
	int entry_exit_extra = 0;

	/* Create the list of segments within each basic block.
	   If NORMAL_MODE is defined, allow for extra blocks split
	   from the entry and exit block.  */
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
	entry_exit_extra = 3;
#endif
	bb_info[n_entities]
	  = XCNEWVEC (struct bb_info, last_basic_block + entry_exit_extra);
	entity_map[n_entities++] = e;
	if (num_modes[e] > max_num_modes)
	  max_num_modes = num_modes[e];
      }

  if (! n_entities)
    return 0;

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  /* Split the edge from the entry block, so that we can note that
     NORMAL_MODE is supplied there.  */
  post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
  pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
#endif

  /* Create the bitmap vectors.  */

  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
  comp = sbitmap_vector_alloc (last_basic_block, n_entities);

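  /* Start with every block transparent for every entity; transparency
     bits are cleared below as mode requirements are discovered.  */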
  sbitmap_vector_ones (transp, last_basic_block);

  for (j = n_entities - 1; j >= 0; j--)
    {
      int e = entity_map[j];
      int no_mode = num_modes[e];
      struct bb_info *info = bb_info[j];

      /* Determine what mode, if any, the first use of entity E in each
	 block needs.  This will be the mode that is anticipatable for
	 this block.  Also compute the initial transparency settings.  */
      FOR_EACH_BB (bb)
	{
	  struct seginfo *ptr;
	  int last_mode = no_mode;
	  HARD_REG_SET live_now;

	  REG_SET_TO_HARD_REG_SET (live_now,
				   bb->il.rtl->global_live_at_start);

	  /* Pretend the mode is clobbered across abnormal edges.  */
	  {
	    edge_iterator ei;
	    edge e;
	    FOR_EACH_EDGE (e, ei, bb->preds)
	      if (e->flags & EDGE_COMPLEX)
		break;
	    if (e)
	      {
		ptr = new_seginfo (no_mode, BB_HEAD (bb), bb->index, live_now);
		add_seginfo (info + bb->index, ptr);
		RESET_BIT (transp[bb->index], j);
	      }
	  }

	  for (insn = BB_HEAD (bb);
	       insn != NULL && insn != NEXT_INSN (BB_END (bb));
	       insn = NEXT_INSN (insn))
	    {
	      if (INSN_P (insn))
		{
		  int mode = MODE_NEEDED (e, insn);
		  rtx link;

		  if (mode != no_mode && mode != last_mode)
		    {
		      last_mode = mode;
		      ptr = new_seginfo (mode, insn, bb->index, live_now);
		      add_seginfo (info + bb->index, ptr);
		      RESET_BIT (transp[bb->index], j);
		    }
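		  /* If the target defines MODE_AFTER, let it say which mode
		     is in effect after INSN, e.g. for insns that change the
		     mode themselves.  */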
#ifdef MODE_AFTER
		  last_mode = MODE_AFTER (last_mode, insn);
#endif
		  /* Update LIVE_NOW.  */
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_DEAD)
		      reg_dies (XEXP (link, 0), live_now);

		  note_stores (PATTERN (insn), reg_becomes_live, &live_now);
		  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		    if (REG_NOTE_KIND (link) == REG_UNUSED)
		      reg_dies (XEXP (link, 0), live_now);
		}
	    }

	  info[bb->index].computing = last_mode;
	  /* Check for blocks without ANY mode requirements.  */
	  if (last_mode == no_mode)
	    {
	      ptr = new_seginfo (no_mode, BB_END (bb), bb->index, live_now);
	      add_seginfo (info + bb->index, ptr);
	    }
	}
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
      {
	int mode = MODE_ENTRY (e);

	if (mode != no_mode)
	  {
	    bb = post_entry;

	    /* By always making this nontransparent, we save
	       an extra check in make_preds_opaque.  We also
	       need this to avoid confusing pre_edge_lcm when
	       antic is cleared but transp and comp are set.  */
	    RESET_BIT (transp[bb->index], j);

	    /* Insert a fake computing definition of MODE into entry
	       blocks which compute no mode.  This represents the mode on
	       entry.  */
	    info[bb->index].computing = mode;

	    if (pre_exit)
	      info[pre_exit->index].seginfo->mode = MODE_EXIT (e);
	  }
      }
#endif /* MODE_ENTRY && MODE_EXIT */
    }

  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
  for (i = 0; i < max_num_modes; i++)
    {
      int current_mode[N_ENTITIES];
      sbitmap *delete;
      sbitmap *insert;

      /* Set the anticipatable and computing arrays.  */
      sbitmap_vector_zero (antic, last_basic_block);
      sbitmap_vector_zero (comp, last_basic_block);
      for (j = n_entities - 1; j >= 0; j--)
	{
	  int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
	  struct bb_info *info = bb_info[j];

	  FOR_EACH_BB (bb)
	    {
	      if (info[bb->index].seginfo->mode == m)
		SET_BIT (antic[bb->index], j);

	      if (info[bb->index].computing == m)
		SET_BIT (comp[bb->index], j);
	    }
	}

      /* Calculate the optimal locations for placing mode switches to
	 modes with priority I.  */

      FOR_EACH_BB (bb)
	sbitmap_not (kill[bb->index], transp[bb->index]);
      edge_list = pre_edge_lcm (n_entities, transp, comp, antic,
				kill, &insert, &delete);
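
      /* A set bit J in INSERT[E] asks for a mode set for entity J to be
	 inserted on edge E; a set bit J in DELETE[BB] marks the mode
	 computation in block BB as redundant for entity J.  That is how
	 the two vectors returned by pre_edge_lcm are consumed below.  */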

      for (j = n_entities - 1; j >= 0; j--)
	{
	  /* Insert all mode sets that LCM has decided to insert.  */
	  int no_mode = num_modes[entity_map[j]];

	  /* Wherever we have moved a mode setting upwards in the flow graph,
	     the blocks between the new setting site and the now redundant
	     computation cease to be transparent for any lower-priority
	     mode of the same entity.  First set the aux field of each
	     insertion site edge non-transparent, then propagate the new
	     non-transparency from the redundant computation upwards until
	     we hit an insertion site or an already non-transparent block.  */
	  for (e = NUM_EDGES (edge_list) - 1; e >= 0; e--)
	    {
	      edge eg = INDEX_EDGE (edge_list, e);
	      int mode;
	      basic_block src_bb;
	      HARD_REG_SET live_at_edge;
	      rtx mode_set;

	      eg->aux = 0;

	      if (! TEST_BIT (insert[e], j))
		continue;

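	      /* Mark this edge as an insertion site; make_preds_opaque stops
		 propagating non-transparency at edges with AUX set.  */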
	      eg->aux = (void *)1;

	      mode = current_mode[j];
	      src_bb = eg->src;

	      REG_SET_TO_HARD_REG_SET (live_at_edge,
				       src_bb->il.rtl->global_live_at_end);

	      start_sequence ();
	      EMIT_MODE_SET (entity_map[j], mode, live_at_edge);
	      mode_set = get_insns ();
	      end_sequence ();

	      /* Do not bother to insert an empty sequence.  */
	      if (mode_set == NULL_RTX)
		continue;

	      /* We should not get an abnormal edge here.  */
	      gcc_assert (! (eg->flags & EDGE_ABNORMAL));

	      need_commit = 1;
	      insert_insn_on_edge (mode_set, eg);
	    }

	  FOR_EACH_BB_REVERSE (bb)
	    if (TEST_BIT (delete[bb->index], j))
	      {
		make_preds_opaque (bb, j);
		/* Cancel the 'deleted' mode set.  */
		bb_info[j][bb->index].seginfo->mode = no_mode;
	      }
	}

      sbitmap_vector_free (delete);
      sbitmap_vector_free (insert);
      clear_aux_for_edges ();
      free_edge_list (edge_list);
    }

  /* Now output the remaining mode sets in all the segments.  */
  for (j = n_entities - 1; j >= 0; j--)
    {
      int no_mode = num_modes[entity_map[j]];

      FOR_EACH_BB_REVERSE (bb)
	{
	  struct seginfo *ptr, *next;
	  for (ptr = bb_info[j][bb->index].seginfo; ptr; ptr = next)
	    {
	      next = ptr->next;
	      if (ptr->mode != no_mode)
		{
		  rtx mode_set;

		  start_sequence ();
		  EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
		  mode_set = get_insns ();
		  end_sequence ();

		  /* Insert MODE_SET only if it is nonempty.  */
		  if (mode_set != NULL_RTX)
		    {
		      emitted = true;
		      if (NOTE_P (ptr->insn_ptr)
			  && (NOTE_LINE_NUMBER (ptr->insn_ptr)
			      == NOTE_INSN_BASIC_BLOCK))
			emit_insn_after (mode_set, ptr->insn_ptr);
		      else
			emit_insn_before (mode_set, ptr->insn_ptr);
		    }
		}

	      free (ptr);
	    }
	}

      free (bb_info[j]);
    }

  /* Finished.  Free up all the things we've allocated.  */

  sbitmap_vector_free (kill);
  sbitmap_vector_free (antic);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  if (need_commit)
    commit_edge_insertions ();

#if defined (MODE_ENTRY) && defined (MODE_EXIT)
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#else
  if (!need_commit && !emitted)
    return 0;
#endif

  max_regno = max_reg_num ();
  allocate_reg_info (max_regno, FALSE, FALSE);
  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    (PROP_DEATH_NOTES | PROP_KILL_DEAD_CODE
				     | PROP_SCAN_DEAD_CODE));

  return 1;
}

#endif /* OPTIMIZE_MODE_SWITCHING */

static bool
gate_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  return true;
#else
  return false;
#endif
}

static unsigned int
rest_of_handle_mode_switching (void)
{
#ifdef OPTIMIZE_MODE_SWITCHING
  no_new_pseudos = 0;
  optimize_mode_switching ();
  no_new_pseudos = 1;
#endif /* OPTIMIZE_MODE_SWITCHING */
  return 0;
}

struct tree_opt_pass pass_mode_switching =
{
  "mode-sw",                            /* name */
  gate_mode_switching,                  /* gate */
  rest_of_handle_mode_switching,        /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MODE_SWITCH,                       /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};