/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "hash-map.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "flags.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "hashtab.h"
#include "rtl.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "omp-low.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */
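
/* For instance (an illustrative sketch, not tied to any particular
   input): given

     switch (x) { case 1: case 2: goto L1; default: goto L2; }

   the edges from the switch block to L1's block and to L2's block are
   the keys; the value for the L1 edge is the chain "case 1 -> case 2"
   linked through CASE_CHAIN, and the value for the L2 edge is the
   single-element chain holding the default label.  */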

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}

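/* Likewise, but for the CFG of the current function (cfun).  */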
void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}

/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */
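
/* A sketch of the expected shape (illustrative GIMPLE, not verbatim
   compiler output): right before the exit test of the annotated loop
   we expect something like

     D.1 = .ANNOTATE (cond_val, annot_expr_ivdep_kind);
     if (D.1 != 0) goto <body>; else goto <exit>;

   and, once the kind has been recorded on LOOP, the internal call is
   replaced below by the plain copy D.1 = cond_val.  */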

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}

/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}


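/* Execute the pass: build the CFG for the current function's GIMPLE body,
   discard the body, clean up the fresh CFG and initialize the loop
   structures so that loop annotations can be propagated.  */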
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}


/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}

/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}


/* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates that the
   call could alter control flow except via EH.  We initialize the flag
   at CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* A BUILT_IN_RETURN call is the same as a return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make an abnormal goto, use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
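	  /* For example (an illustrative sketch, assuming a setjmp-like
	     call): the statement

	       res = setjmp (buf);

	     becomes

	       tmp = setjmp (buf);
	       res = tmp;

	     so that RES still holds its old value along the abnormal
	     edge out of the call.  */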
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}


/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  location_t loc = gimple_location (stmt);
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
				  boolean_type_node,
				  gimple_cond_lhs (cond_stmt),
				  gimple_cond_rhs (cond_stmt));
	  if (cond)
	    {
	      zerop = integer_zerop (cond);
	      onep = integer_onep (cond);
	    }
	  else
	    zerop = onep = false;

	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    gimple_cond_make_false (cond_stmt);
	  else if (onep)
	    gimple_cond_make_true (cond_stmt);
	}
    }
}

/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}

/* Helper function for make_edges.  Create a basic block with an
   ABNORMAL_DISPATCHER internal call in it if needed, and create
   abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */
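
/* A sketch of the non-computed-goto case (illustrative): the dispatcher
   block contains just

     .ABNORMAL_DISPATCHER (0);

   (argument 1 instead of 0 when it sits inside an inner OpenMP region);
   abnormal edges run from every block in BBS to the dispatcher, and one
   abnormal edge runs from the dispatcher to FOR_BB.  */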

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it already, and all that remains is
     the edge from it to FOR_BB added at the end.  Otherwise create the
     dispatcher and its incoming edges first.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						 1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      if (last)
	{
	  enum gimple_code code = gimple_code (last);
	  switch (code)
	    {
	    case GIMPLE_GOTO:
	      if (make_goto_expr_edges (bb))
		ab_edge_goto.safe_push (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RETURN:
	      {
		edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		e->goto_locus = gimple_location (last);
		fallthru = false;
	      }
	      break;
	    case GIMPLE_COND:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case GIMPLE_SWITCH:
	      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
	      fallthru = false;
	      break;
	    case GIMPLE_RESX:
	      make_eh_edges (last);
	      fallthru = false;
	      break;
	    case GIMPLE_EH_DISPATCH:
	      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
	      break;

	    case GIMPLE_CALL:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (stmt_can_make_abnormal_goto (last))
		ab_edge_call.safe_push (bb);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* BUILTIN_RETURN is really a return statement.  */
	      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
		{
		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  fallthru = false;
		}
	      /* Some calls are known not to return.  */
	      else
		fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
	      break;

	    case GIMPLE_ASSIGN:
	      /* A GIMPLE_ASSIGN may throw internally and thus be considered
		 control-altering.  */
	      if (is_ctrl_altering_stmt (last))
		make_eh_edges (last);
	      fallthru = true;
	      break;

	    case GIMPLE_ASM:
	      make_gimple_asm_edges (bb);
	      fallthru = true;
	      break;

	    CASE_GIMPLE_OMP:
	      fallthru = make_gimple_omp_edges (bb, &cur_region,
						&cur_omp_region_idx);
	      if (cur_region && bb_to_omp_idx == NULL)
		bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
	      break;

	    case GIMPLE_TRANSACTION:
	      {
		tree abort_label
		  = gimple_transaction_label (as_a <gtransaction *> (last));
		if (abort_label)
		  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
		fallthru = true;
	      }
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and for abnormal edges from calls, factor the
     abnormal edges too: all abnormal edges from the calls go to a common
     artificial basic block with an ABNORMAL_DISPATCHER internal call,
     and abnormal edges lead from that basic block to all forced labels
     and to all calls that return twice.  We do this per OpenMP structured
     block, because the standard guarantees those regions to be single
     entry single exit, so control is not allowed to enter or leave such
     a region abnormally; thus computed gotos, non-local gotos and
     setjmp/longjmp calls must never transfer control across SESE region
     boundaries.  */
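
  /* An illustrative sketch of the factoring (not verbatim GIMPLE): each

       goto *p;

     is rewritten as

       gotovar = p;
       goto <dispatcher>;

     and the single dispatcher block ends in "goto gotovar;", with an
     edge to every FORCED_LABEL the original gotos could reach.  */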
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */
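
/* For example (an illustrative sketch): when several basic blocks carry
   statements from the same source line 42, the first keeps the default
   discriminator 0 and subsequent ones receive 1, 2, ..., so a
   sample-based profiler can tell their counts apart.  */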

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple first = first_non_label_stmt (e->dest);
	  gimple last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table as we delete the
   edge-to-cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
1810		  gcc_checking_assert (virtual_operand_p (name));
1811		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1812		}
1813	    }
1814	}
1815
1816      if (gimple_code (stmt) != GIMPLE_PHI)
1817	{
1818	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1819	  gimple orig_stmt = stmt;
1820	  size_t i;
1821
1822	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
1823	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change something from non-invariant to invariant, and only
1825	     when propagating constants.  */
1826	  if (is_gimple_min_invariant (val))
1827	    for (i = 0; i < gimple_num_ops (stmt); i++)
1828	      {
1829		tree op = gimple_op (stmt, i);
1830		/* Operands may be empty here.  For example, the labels
1831		   of a GIMPLE_COND are nulled out following the creation
1832		   of the corresponding CFG edges.  */
1833		if (op && TREE_CODE (op) == ADDR_EXPR)
1834		  recompute_tree_invariant_for_addr_expr (op);
1835	      }
1836
1837	  if (fold_stmt (&gsi))
1838	    stmt = gsi_stmt (gsi);
1839
1840	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1841	    gimple_purge_dead_eh_edges (gimple_bb (stmt));
1842
1843	  update_stmt (stmt);
1844	}
1845    }
1846
1847  gcc_checking_assert (has_zero_uses (name));
1848
1849  /* Also update the trees stored in loop structures.  */
1850  if (current_loops)
1851    {
1852      struct loop *loop;
1853
1854      FOR_EACH_LOOP (loop, 0)
1855	{
1856	  substitute_in_loop_info (loop, name, val);
1857	}
1858    }
1859}
1860
1861/* Merge block B into block A.  */
1862
1863static void
1864gimple_merge_blocks (basic_block a, basic_block b)
1865{
1866  gimple_stmt_iterator last, gsi;
1867  gphi_iterator psi;
1868
1869  if (dump_file)
1870    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1871
1872  /* Remove all single-valued PHI nodes from block B of the form
1873     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
1874  gsi = gsi_last_bb (a);
1875  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1876    {
1877      gimple phi = gsi_stmt (psi);
1878      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1879      gimple copy;
1880      bool may_replace_uses = (virtual_operand_p (def)
1881			       || may_propagate_copy (def, use));
1882
      /* In case we maintain loop-closed SSA form, do not propagate arguments
1884	 of loop exit phi nodes.  */
1885      if (current_loops
1886	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1887	  && !virtual_operand_p (def)
1888	  && TREE_CODE (use) == SSA_NAME
1889	  && a->loop_father != b->loop_father)
1890	may_replace_uses = false;
1891
1892      if (!may_replace_uses)
1893	{
1894	  gcc_assert (!virtual_operand_p (def));
1895
1896	  /* Note that just emitting the copies is fine -- there is no problem
1897	     with ordering of phi nodes.  This is because A is the single
1898	     predecessor of B, therefore results of the phi nodes cannot
1899	     appear as arguments of the phi nodes.  */
1900	  copy = gimple_build_assign (def, use);
1901	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1902          remove_phi_node (&psi, false);
1903	}
1904      else
1905        {
1906	  /* If we deal with a PHI for virtual operands, we can simply
1907	     propagate these without fussing with folding or updating
1908	     the stmt.  */
1909	  if (virtual_operand_p (def))
1910	    {
1911	      imm_use_iterator iter;
1912	      use_operand_p use_p;
1913	      gimple stmt;
1914
1915	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1916		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1917		  SET_USE (use_p, use);
1918
1919	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1920		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1921	    }
1922	  else
1923            replace_uses_by (def, use);
1924
1925          remove_phi_node (&psi, true);
1926        }
1927    }
1928
1929  /* Ensure that B follows A.  */
1930  move_block_after (b, a);
1931
1932  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1933  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1934
1935  /* Remove labels from B and set gimple_bb to A for other statements.  */
1936  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1937    {
1938      gimple stmt = gsi_stmt (gsi);
1939      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1940	{
1941	  tree label = gimple_label_label (label_stmt);
1942	  int lp_nr;
1943
1944	  gsi_remove (&gsi, false);
1945
	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
1952	  if (FORCED_LABEL (label))
1953	    {
1954	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1955	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1956	    }
	  /* Other user labels are kept around in the form of a debug stmt.  */
1958	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1959	    {
1960	      gimple dbg = gimple_build_debug_bind (label,
1961						    integer_zero_node,
1962						    stmt);
1963	      gimple_debug_bind_reset_value (dbg);
1964	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1965	    }
1966
1967	  lp_nr = EH_LANDING_PAD_NR (label);
1968	  if (lp_nr)
1969	    {
1970	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1971	      lp->post_landing_pad = NULL;
1972	    }
1973	}
1974      else
1975	{
1976	  gimple_set_bb (stmt, a);
1977	  gsi_next (&gsi);
1978	}
1979    }
1980
  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
1984  if (a->loop_father == b->loop_father)
1985    {
1986      a->count = MAX (a->count, b->count);
1987      a->frequency = MAX (a->frequency, b->frequency);
1988    }
1989
1990  /* Merge the sequences.  */
1991  last = gsi_last_bb (a);
1992  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1993  set_bb_seq (b, NULL);
1994
1995  if (cfgcleanup_altered_bbs)
1996    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1997}
1998
1999
/* Return the one of BB's two successors that is not reachable via a
   complex edge, if there is one.  Otherwise, return BB.  We use
2002   this in optimizations that use post-dominators for their heuristics,
2003   to catch the cases in C++ where function calls are involved.  */
2004
2005basic_block
2006single_noncomplex_succ (basic_block bb)
2007{
2008  edge e0, e1;
2009  if (EDGE_COUNT (bb->succs) != 2)
2010    return bb;
2011
2012  e0 = EDGE_SUCC (bb, 0);
2013  e1 = EDGE_SUCC (bb, 1);
2014  if (e0->flags & EDGE_COMPLEX)
2015    return e1->dest;
2016  if (e1->flags & EDGE_COMPLEX)
2017    return e0->dest;
2018
2019  return bb;
2020}
2021
/* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags.  */
2023
2024void
2025notice_special_calls (gcall *call)
2026{
2027  int flags = gimple_call_flags (call);
2028
2029  if (flags & ECF_MAY_BE_ALLOCA)
2030    cfun->calls_alloca = true;
2031  if (flags & ECF_RETURNS_TWICE)
2032    cfun->calls_setjmp = true;
2033}
2034
2035
2036/* Clear flags set by notice_special_calls.  Used by dead code removal
2037   to update the flags.  */
2038
2039void
2040clear_special_calls (void)
2041{
2042  cfun->calls_alloca = false;
2043  cfun->calls_setjmp = false;
2044}
2045
2046/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */
2047
2048static void
2049remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2050{
2051  /* Since this block is no longer reachable, we can just delete all
2052     of its PHI nodes.  */
2053  remove_phi_nodes (bb);
2054
2055  /* Remove edges to BB's successors.  */
2056  while (EDGE_COUNT (bb->succs) > 0)
2057    remove_edge (EDGE_SUCC (bb, 0));
2058}
2059
2060
2061/* Remove statements of basic block BB.  */
2062
2063static void
2064remove_bb (basic_block bb)
2065{
2066  gimple_stmt_iterator i;
2067
2068  if (dump_file)
2069    {
2070      fprintf (dump_file, "Removing basic block %d\n", bb->index);
2071      if (dump_flags & TDF_DETAILS)
2072	{
2073	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2074	  fprintf (dump_file, "\n");
2075	}
2076    }
2077
2078  if (current_loops)
2079    {
2080      struct loop *loop = bb->loop_father;
2081
2082      /* If a loop gets removed, clean up the information associated
2083	 with it.  */
2084      if (loop->latch == bb
2085	  || loop->header == bb)
2086	free_numbers_of_iterations_estimates_loop (loop);
2087    }
2088
2089  /* Remove all the instructions in the block.  */
2090  if (bb_seq (bb) != NULL)
2091    {
2092      /* Walk backwards so as to get a chance to substitute all
2093	 released DEFs into debug stmts.  See
2094	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2095	 details.  */
2096      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2097	{
2098	  gimple stmt = gsi_stmt (i);
2099	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
2100	  if (label_stmt
2101	      && (FORCED_LABEL (gimple_label_label (label_stmt))
2102		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2103	    {
2104	      basic_block new_bb;
2105	      gimple_stmt_iterator new_gsi;
2106
2107	      /* A non-reachable non-local label may still be referenced.
2108		 But it no longer needs to carry the extra semantics of
2109		 non-locality.  */
2110	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2111		{
2112		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2113		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2114		}
2115
2116	      new_bb = bb->prev_bb;
2117	      new_gsi = gsi_start_bb (new_bb);
2118	      gsi_remove (&i, false);
2119	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2120	    }
2121	  else
2122	    {
2123	      /* Release SSA definitions if we are in SSA.  Note that we
2124		 may be called when not in SSA.  For example,
2125		 final_cleanup calls this function via
2126		 cleanup_tree_cfg.  */
2127	      if (gimple_in_ssa_p (cfun))
2128		release_defs (stmt);
2129
2130	      gsi_remove (&i, true);
2131	    }
2132
2133	  if (gsi_end_p (i))
2134	    i = gsi_last_bb (bb);
2135	  else
2136	    gsi_prev (&i);
2137	}
2138    }
2139
2140  remove_phi_nodes_and_edges_for_unreachable_block (bb);
2141  bb->il.gimple.seq = NULL;
2142  bb->il.gimple.phi_nodes = NULL;
2143}
2144
2145
/* Given a basic block BB ending with a GIMPLE_COND, GIMPLE_SWITCH or
   computed goto, and a predicate VAL, return the edge that will be
   taken out of the block.  If VAL does not match a unique edge, NULL
   is returned.  */
2149
2150edge
2151find_taken_edge (basic_block bb, tree val)
2152{
2153  gimple stmt;
2154
2155  stmt = last_stmt (bb);
2156
2157  gcc_assert (stmt);
2158  gcc_assert (is_ctrl_stmt (stmt));
2159
2160  if (val == NULL)
2161    return NULL;
2162
2163  if (!is_gimple_min_invariant (val))
2164    return NULL;
2165
2166  if (gimple_code (stmt) == GIMPLE_COND)
2167    return find_taken_edge_cond_expr (bb, val);
2168
2169  if (gimple_code (stmt) == GIMPLE_SWITCH)
2170    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
2171
2172  if (computed_goto_p (stmt))
2173    {
      /* Only optimize if the argument is a label; if the argument is
	 not a label then we cannot construct a proper CFG.
2176
2177         It may be the case that we only need to allow the LABEL_REF to
2178         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2179         appear inside a LABEL_EXPR just to be safe.  */
2180      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2181	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2182	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2183      return NULL;
2184    }
2185
2186  gcc_unreachable ();
2187}
2188
2189/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2190   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if any edge may be taken.  */
2192
2193static edge
2194find_taken_edge_computed_goto (basic_block bb, tree val)
2195{
2196  basic_block dest;
2197  edge e = NULL;
2198
2199  dest = label_to_block (val);
2200  if (dest)
2201    {
2202      e = find_edge (bb, dest);
2203      gcc_assert (e != NULL);
2204    }
2205
2206  return e;
2207}
2208
2209/* Given a constant value VAL and the entry block BB to a COND_EXPR
2210   statement, determine which of the two edges will be taken out of the
2211   block.  Return NULL if either edge may be taken.  */
2212
2213static edge
2214find_taken_edge_cond_expr (basic_block bb, tree val)
2215{
2216  edge true_edge, false_edge;
2217
2218  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2219
2220  gcc_assert (TREE_CODE (val) == INTEGER_CST);
2221  return (integer_zerop (val) ? false_edge : true_edge);
2222}
2223
2224/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2225   statement, determine which edge will be taken out of the block.  Return
2226   NULL if any edge may be taken.  */
2227
2228static edge
2229find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
2230			     tree val)
2231{
2232  basic_block dest_bb;
2233  edge e;
2234  tree taken_case;
2235
2236  taken_case = find_case_label_for_value (switch_stmt, val);
2237  dest_bb = label_to_block (CASE_LABEL (taken_case));
2238
2239  e = find_edge (bb, dest_bb);
2240  gcc_assert (e);
2241  return e;
2242}
2243
2244
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   Here we can make optimal use of the fact that the case labels are
   sorted: we can do a binary search for a case matching VAL.  */
2248
2249static tree
2250find_case_label_for_value (gswitch *switch_stmt, tree val)
2251{
2252  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2253  tree default_case = gimple_switch_default_label (switch_stmt);
2254
2255  for (low = 0, high = n; high - low > 1; )
2256    {
2257      size_t i = (high + low) / 2;
2258      tree t = gimple_switch_label (switch_stmt, i);
2259      int cmp;
2260
2261      /* Cache the result of comparing CASE_LOW and val.  */
2262      cmp = tree_int_cst_compare (CASE_LOW (t), val);
2263
2264      if (cmp > 0)
2265	high = i;
2266      else
2267	low = i;
2268
2269      if (CASE_HIGH (t) == NULL)
2270	{
	  /* A single-valued case label.  */
2272	  if (cmp == 0)
2273	    return t;
2274	}
2275      else
2276	{
2277	  /* A case range.  We can only handle integer ranges.  */
2278	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2279	    return t;
2280	}
2281    }
2282
2283  return default_case;
2284}
2285
2286
2287/* Dump a basic block on stderr.  */
2288
2289void
2290gimple_debug_bb (basic_block bb)
2291{
2292  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2293}
2294
2295
2296/* Dump basic block with index N on stderr.  */
2297
2298basic_block
2299gimple_debug_bb_n (int n)
2300{
2301  gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2302  return BASIC_BLOCK_FOR_FN (cfun, n);
2303}
2304
2305
2306/* Dump the CFG on stderr.
2307
   FLAGS are the same as those used by the tree dumping functions
2309   (see TDF_* in dumpfile.h).  */
2310
2311void
2312gimple_debug_cfg (int flags)
2313{
2314  gimple_dump_cfg (stderr, flags);
2315}
2316
2317
2318/* Dump the program showing basic block boundaries on the given FILE.
2319
   FLAGS are the same as those used by the tree dumping functions (see
   TDF_* in dumpfile.h).  */
2322
2323void
2324gimple_dump_cfg (FILE *file, int flags)
2325{
2326  if (flags & TDF_DETAILS)
2327    {
2328      dump_function_header (file, current_function_decl, flags);
2329      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2330	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2331	       last_basic_block_for_fn (cfun));
2332
2333      brief_dump_cfg (file, flags | TDF_COMMENT);
2334      fprintf (file, "\n");
2335    }
2336
2337  if (flags & TDF_STATS)
2338    dump_cfg_stats (file);
2339
2340  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2341}
2342
2343
2344/* Dump CFG statistics on FILE.  */
2345
2346void
2347dump_cfg_stats (FILE *file)
2348{
2349  static long max_num_merged_labels = 0;
2350  unsigned long size, total = 0;
2351  long num_edges;
2352  basic_block bb;
2353  const char * const fmt_str   = "%-30s%-13s%12s\n";
2354  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2355  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2356  const char * const fmt_str_3 = "%-43s%11lu%c\n";
2357  const char *funcname = current_function_name ();
2358
2359  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2360
2361  fprintf (file, "---------------------------------------------------------\n");
2362  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
2363  fprintf (file, fmt_str, "", "  instances  ", "used ");
2364  fprintf (file, "---------------------------------------------------------\n");
2365
2366  size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2367  total += size;
2368  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2369	   SCALE (size), LABEL (size));
2370
2371  num_edges = 0;
2372  FOR_EACH_BB_FN (bb, cfun)
2373    num_edges += EDGE_COUNT (bb->succs);
2374  size = num_edges * sizeof (struct edge_def);
2375  total += size;
2376  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2377
2378  fprintf (file, "---------------------------------------------------------\n");
2379  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2380	   LABEL (total));
2381  fprintf (file, "---------------------------------------------------------\n");
2382  fprintf (file, "\n");
2383
2384  if (cfg_stats.num_merged_labels > max_num_merged_labels)
2385    max_num_merged_labels = cfg_stats.num_merged_labels;
2386
2387  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2388	   cfg_stats.num_merged_labels, max_num_merged_labels);
2389
2390  fprintf (file, "\n");
2391}
2392
2393
2394/* Dump CFG statistics on stderr.  Keep extern so that it's always
2395   linked in the final executable.  */
2396
2397DEBUG_FUNCTION void
2398debug_cfg_stats (void)
2399{
2400  dump_cfg_stats (stderr);
2401}
2402
2403/*---------------------------------------------------------------------------
2404			     Miscellaneous helpers
2405---------------------------------------------------------------------------*/
2406
2407/* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2408   flow.  Transfers of control flow associated with EH are excluded.  */
2409
2410static bool
2411call_can_make_abnormal_goto (gimple t)
2412{
2413  /* If the function has no non-local labels, then a call cannot make an
2414     abnormal transfer of control.  */
2415  if (!cfun->has_nonlocal_label
2416      && !cfun->calls_setjmp)
    return false;
2418
2419  /* Likewise if the call has no side effects.  */
2420  if (!gimple_has_side_effects (t))
2421    return false;
2422
2423  /* Likewise if the called function is leaf.  */
2424  if (gimple_call_flags (t) & ECF_LEAF)
2425    return false;
2426
2427  return true;
2428}
2429
2430
2431/* Return true if T can make an abnormal transfer of control flow.
2432   Transfers of control flow associated with EH are excluded.  */
2433
2434bool
2435stmt_can_make_abnormal_goto (gimple t)
2436{
2437  if (computed_goto_p (t))
2438    return true;
2439  if (is_gimple_call (t))
2440    return call_can_make_abnormal_goto (t);
2441  return false;
2442}
2443
2444
2445/* Return true if T represents a stmt that always transfers control.  */
2446
2447bool
2448is_ctrl_stmt (gimple t)
2449{
2450  switch (gimple_code (t))
2451    {
2452    case GIMPLE_COND:
2453    case GIMPLE_SWITCH:
2454    case GIMPLE_GOTO:
2455    case GIMPLE_RETURN:
2456    case GIMPLE_RESX:
2457      return true;
2458    default:
2459      return false;
2460    }
2461}
2462
2463
2464/* Return true if T is a statement that may alter the flow of control
2465   (e.g., a call to a non-returning function).  */
2466
2467bool
2468is_ctrl_altering_stmt (gimple t)
2469{
2470  gcc_assert (t);
2471
2472  switch (gimple_code (t))
2473    {
2474    case GIMPLE_CALL:
      /* The per-stmt call flag indicates whether the call could alter
	 control flow.  */
2477      if (gimple_call_ctrl_altering_p (t))
2478	return true;
2479      break;
2480
2481    case GIMPLE_EH_DISPATCH:
2482      /* EH_DISPATCH branches to the individual catch handlers at
2483	 this level of a try or allowed-exceptions region.  It can
2484	 fallthru to the next statement as well.  */
2485      return true;
2486
2487    case GIMPLE_ASM:
2488      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2489	return true;
2490      break;
2491
2492    CASE_GIMPLE_OMP:
2493      /* OpenMP directives alter control flow.  */
2494      return true;
2495
2496    case GIMPLE_TRANSACTION:
2497      /* A transaction start alters control flow.  */
2498      return true;
2499
2500    default:
2501      break;
2502    }
2503
2504  /* If a statement can throw, it alters control flow.  */
2505  return stmt_can_throw_internal (t);
2506}
2507
2508
2509/* Return true if T is a simple local goto.  */
2510
2511bool
2512simple_goto_p (gimple t)
2513{
2514  return (gimple_code (t) == GIMPLE_GOTO
2515	  && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2516}
2517
2518
2519/* Return true if STMT should start a new basic block.  PREV_STMT is
2520   the statement preceding STMT.  It is used when STMT is a label or a
2521   case label.  Labels should only start a new basic block if their
2522   previous statement wasn't a label.  Otherwise, sequence of labels
   previous statement wasn't a label.  Otherwise, sequences of labels
   would generate unnecessary basic blocks that contain only a single
2525
2526static inline bool
2527stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2528{
2529  if (stmt == NULL)
2530    return false;
2531
2532  /* Labels start a new basic block only if the preceding statement
2533     wasn't a label of the same type.  This prevents the creation of
2534     consecutive blocks that have nothing but a single label.  */
2535  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2536    {
2537      /* Nonlocal and computed GOTO targets always start a new block.  */
2538      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2539	  || FORCED_LABEL (gimple_label_label (label_stmt)))
2540	return true;
2541
2542      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2543	{
2544	  if (DECL_NONLOCAL (gimple_label_label (
2545			       as_a <glabel *> (prev_stmt))))
2546	    return true;
2547
2548	  cfg_stats.num_merged_labels++;
2549	  return false;
2550	}
2551      else
2552	return true;
2553    }
2554  else if (gimple_code (stmt) == GIMPLE_CALL
2555	   && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
    /* setjmp acts similarly to a nonlocal GOTO target and thus should
2557       start a new block.  */
2558    return true;
2559
2560  return false;
2561}
2562
2563
2564/* Return true if T should end a basic block.  */
2565
2566bool
2567stmt_ends_bb_p (gimple t)
2568{
2569  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2570}
2571
2572/* Remove block annotations and other data structures.  */
2573
2574void
2575delete_tree_cfg_annotations (void)
2576{
2577  vec_free (label_to_block_map_for_fn (cfun));
2578}
2579
2580
2581/* Return the first statement in basic block BB.  */
2582
2583gimple
2584first_stmt (basic_block bb)
2585{
2586  gimple_stmt_iterator i = gsi_start_bb (bb);
2587  gimple stmt = NULL;
2588
2589  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2590    {
2591      gsi_next (&i);
2592      stmt = NULL;
2593    }
2594  return stmt;
2595}
2596
2597/* Return the first non-label statement in basic block BB.  */
2598
2599static gimple
2600first_non_label_stmt (basic_block bb)
2601{
2602  gimple_stmt_iterator i = gsi_start_bb (bb);
2603  while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2604    gsi_next (&i);
2605  return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2606}
2607
2608/* Return the last statement in basic block BB.  */
2609
2610gimple
2611last_stmt (basic_block bb)
2612{
2613  gimple_stmt_iterator i = gsi_last_bb (bb);
2614  gimple stmt = NULL;
2615
2616  while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2617    {
2618      gsi_prev (&i);
2619      stmt = NULL;
2620    }
2621  return stmt;
2622}
2623
2624/* Return the last statement of an otherwise empty block.  Return NULL
2625   if the block is totally empty, or if it contains more than one
2626   statement.  */
2627
2628gimple
2629last_and_only_stmt (basic_block bb)
2630{
2631  gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2632  gimple last, prev;
2633
2634  if (gsi_end_p (i))
2635    return NULL;
2636
2637  last = gsi_stmt (i);
2638  gsi_prev_nondebug (&i);
2639  if (gsi_end_p (i))
2640    return last;
2641
2642  /* Empty statements should no longer appear in the instruction stream.
2643     Everything that might have appeared before should be deleted by
2644     remove_useless_stmts, and the optimizers should just gsi_remove
2645     instead of smashing with build_empty_stmt.
2646
2647     Thus the only thing that should appear here in a block containing
2648     one executable statement is a label.  */
2649  prev = gsi_stmt (i);
2650  if (gimple_code (prev) == GIMPLE_LABEL)
2651    return last;
2652  else
2653    return NULL;
2654}
2655
2656/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
2657
2658static void
2659reinstall_phi_args (edge new_edge, edge old_edge)
2660{
2661  edge_var_map *vm;
2662  int i;
2663  gphi_iterator phis;
2664
2665  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2666  if (!v)
2667    return;
2668
2669  for (i = 0, phis = gsi_start_phis (new_edge->dest);
2670       v->iterate (i, &vm) && !gsi_end_p (phis);
2671       i++, gsi_next (&phis))
2672    {
2673      gphi *phi = phis.phi ();
2674      tree result = redirect_edge_var_map_result (vm);
2675      tree arg = redirect_edge_var_map_def (vm);
2676
2677      gcc_assert (result == gimple_phi_result (phi));
2678
2679      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2680    }
2681
2682  redirect_edge_var_map_clear (old_edge);
2683}
2684
2685/* Returns the basic block after which the new basic block created
2686   by splitting edge EDGE_IN should be placed.  Tries to keep the new block
2687   near its "logical" location.  This is of most help to humans looking
2688   at debugging dumps.  */
2689
2690basic_block
2691split_edge_bb_loc (edge edge_in)
2692{
2693  basic_block dest = edge_in->dest;
2694  basic_block dest_prev = dest->prev_bb;
2695
2696  if (dest_prev)
2697    {
2698      edge e = find_edge (dest_prev, dest);
2699      if (e && !(e->flags & EDGE_COMPLEX))
2700	return edge_in->src;
2701    }
2702  return dest_prev;
2703}
2704
2705/* Split a (typically critical) edge EDGE_IN.  Return the new block.
2706   Abort on abnormal edges.  */
2707
2708static basic_block
2709gimple_split_edge (edge edge_in)
2710{
2711  basic_block new_bb, after_bb, dest;
2712  edge new_edge, e;
2713
2714  /* Abnormal edges cannot be split.  */
2715  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2716
2717  dest = edge_in->dest;
2718
2719  after_bb = split_edge_bb_loc (edge_in);
2720
2721  new_bb = create_empty_bb (after_bb);
2722  new_bb->frequency = EDGE_FREQUENCY (edge_in);
2723  new_bb->count = edge_in->count;
2724  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2725  new_edge->probability = REG_BR_PROB_BASE;
2726  new_edge->count = edge_in->count;
2727
2728  e = redirect_edge_and_branch (edge_in, new_bb);
2729  gcc_assert (e == edge_in);
2730  reinstall_phi_args (new_edge, e);
2731
2732  return new_bb;
2733}
2734
2735
2736/* Verify properties of the address expression T with base object BASE.  */
2737
2738static tree
2739verify_address (tree t, tree base)
2740{
2741  bool old_constant;
2742  bool old_side_effects;
2743  bool new_constant;
2744  bool new_side_effects;
2745
2746  old_constant = TREE_CONSTANT (t);
2747  old_side_effects = TREE_SIDE_EFFECTS (t);
2748
2749  recompute_tree_invariant_for_addr_expr (t);
2750  new_side_effects = TREE_SIDE_EFFECTS (t);
2751  new_constant = TREE_CONSTANT (t);
2752
2753  if (old_constant != new_constant)
2754    {
2755      error ("constant not recomputed when ADDR_EXPR changed");
2756      return t;
2757    }
2758  if (old_side_effects != new_side_effects)
2759    {
2760      error ("side effects not recomputed when ADDR_EXPR changed");
2761      return t;
2762    }
2763
2764  if (!(TREE_CODE (base) == VAR_DECL
2765	|| TREE_CODE (base) == PARM_DECL
2766	|| TREE_CODE (base) == RESULT_DECL))
2767    return NULL_TREE;
2768
2769  if (DECL_GIMPLE_REG_P (base))
2770    {
2771      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2772      return base;
2773    }
2774
2775  return NULL_TREE;
2776}
2777
/* Callback for walk_tree: check that all elements with address taken
   are properly noticed as such.  DATA is currently unused.  */
2781
2782static tree
2783verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2784{
2785  tree t = *tp, x;
2786
2787  if (TYPE_P (t))
2788    *walk_subtrees = 0;
2789
2790  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
2791#define CHECK_OP(N, MSG) \
2792  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
2793       { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2794
2795  switch (TREE_CODE (t))
2796    {
2797    case SSA_NAME:
2798      if (SSA_NAME_IN_FREE_LIST (t))
2799	{
2800	  error ("SSA name in freelist but still referenced");
2801	  return *tp;
2802	}
2803      break;
2804
2805    case INDIRECT_REF:
2806      error ("INDIRECT_REF in gimple IL");
2807      return t;
2808
2809    case MEM_REF:
2810      x = TREE_OPERAND (t, 0);
2811      if (!POINTER_TYPE_P (TREE_TYPE (x))
2812	  || !is_gimple_mem_ref_addr (x))
2813	{
2814	  error ("invalid first operand of MEM_REF");
2815	  return x;
2816	}
2817      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2818	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2819	{
2820	  error ("invalid offset operand of MEM_REF");
2821	  return TREE_OPERAND (t, 1);
2822	}
2823      if (TREE_CODE (x) == ADDR_EXPR
2824	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
2825	return x;
2826      *walk_subtrees = 0;
2827      break;
2828
2829    case ASSERT_EXPR:
2830      x = fold (ASSERT_EXPR_COND (t));
2831      if (x == boolean_false_node)
2832	{
2833	  error ("ASSERT_EXPR with an always-false condition");
2834	  return *tp;
2835	}
2836      break;
2837
2838    case MODIFY_EXPR:
2839      error ("MODIFY_EXPR not expected while having tuples");
2840      return *tp;
2841
2842    case ADDR_EXPR:
2843      {
2844	tree tem;
2845
2846	gcc_assert (is_gimple_address (t));
2847
2848	/* Skip any references (they will be checked when we recurse down the
2849	   tree) and ensure that any variable used as a prefix is marked
2850	   addressable.  */
2851	for (x = TREE_OPERAND (t, 0);
2852	     handled_component_p (x);
2853	     x = TREE_OPERAND (x, 0))
2854	  ;
2855
2856	if ((tem = verify_address (t, x)))
2857	  return tem;
2858
2859	if (!(TREE_CODE (x) == VAR_DECL
2860	      || TREE_CODE (x) == PARM_DECL
2861	      || TREE_CODE (x) == RESULT_DECL))
2862	  return NULL;
2863
2864	if (!TREE_ADDRESSABLE (x))
2865	  {
2866	    error ("address taken, but ADDRESSABLE bit not set");
2867	    return x;
2868	  }
2869
2870	break;
2871      }
2872
2873    case COND_EXPR:
2874      x = COND_EXPR_COND (t);
2875      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2876	{
2877	  error ("non-integral used in condition");
2878	  return x;
2879	}
2880      if (!is_gimple_condexpr (x))
2881        {
2882	  error ("invalid conditional operand");
2883	  return x;
2884	}
2885      break;
2886
2887    case NON_LVALUE_EXPR:
2888    case TRUTH_NOT_EXPR:
2889      gcc_unreachable ();
2890
2891    CASE_CONVERT:
2892    case FIX_TRUNC_EXPR:
2893    case FLOAT_EXPR:
2894    case NEGATE_EXPR:
2895    case ABS_EXPR:
2896    case BIT_NOT_EXPR:
2897      CHECK_OP (0, "invalid operand to unary operator");
2898      break;
2899
2900    case REALPART_EXPR:
2901    case IMAGPART_EXPR:
2902    case BIT_FIELD_REF:
2903      if (!is_gimple_reg_type (TREE_TYPE (t)))
2904	{
2905	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2906	  return t;
2907	}
2908
2909      if (TREE_CODE (t) == BIT_FIELD_REF)
2910	{
2911	  tree t0 = TREE_OPERAND (t, 0);
2912	  tree t1 = TREE_OPERAND (t, 1);
2913	  tree t2 = TREE_OPERAND (t, 2);
2914	  if (!tree_fits_uhwi_p (t1)
2915	      || !tree_fits_uhwi_p (t2))
2916	    {
2917	      error ("invalid position or size operand to BIT_FIELD_REF");
2918	      return t;
2919	    }
2920	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2921	      && (TYPE_PRECISION (TREE_TYPE (t))
2922		  != tree_to_uhwi (t1)))
2923	    {
2924	      error ("integral result type precision does not match "
2925		     "field size of BIT_FIELD_REF");
2926	      return t;
2927	    }
2928	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2929		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2930		   && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t)))
2931		       != tree_to_uhwi (t1)))
2932	    {
2933	      error ("mode size of non-integral result does not "
2934		     "match field size of BIT_FIELD_REF");
2935	      return t;
2936	    }
2937	  if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2938	      && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2939		  > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2940	    {
2941	      error ("position plus size exceeds size of referenced object in "
2942		     "BIT_FIELD_REF");
2943	      return t;
2944	    }
2945	}
2946      t = TREE_OPERAND (t, 0);
2947
2948      /* Fall-through.  */
2949    case COMPONENT_REF:
2950    case ARRAY_REF:
2951    case ARRAY_RANGE_REF:
2952    case VIEW_CONVERT_EXPR:
2953      /* We have a nest of references.  Verify that each of the operands
2954	 that determine where to reference is either a constant or a variable,
2955	 verify that the base is valid, and then show we've already checked
2956	 the subtrees.  */
2957      while (handled_component_p (t))
2958	{
2959	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2960	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2961	  else if (TREE_CODE (t) == ARRAY_REF
2962		   || TREE_CODE (t) == ARRAY_RANGE_REF)
2963	    {
2964	      CHECK_OP (1, "invalid array index");
2965	      if (TREE_OPERAND (t, 2))
2966		CHECK_OP (2, "invalid array lower bound");
2967	      if (TREE_OPERAND (t, 3))
2968		CHECK_OP (3, "invalid array stride");
2969	    }
2970	  else if (TREE_CODE (t) == BIT_FIELD_REF
2971		   || TREE_CODE (t) == REALPART_EXPR
2972		   || TREE_CODE (t) == IMAGPART_EXPR)
2973	    {
2974	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2975		     "REALPART_EXPR");
2976	      return t;
2977	    }
2978
2979	  t = TREE_OPERAND (t, 0);
2980	}
2981
2982      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2983	{
2984	  error ("invalid reference prefix");
2985	  return t;
2986	}
2987      *walk_subtrees = 0;
2988      break;
2989    case PLUS_EXPR:
2990    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
	 arithmetic should be done using POINTER_PLUS_EXPR.  */
2993      if (POINTER_TYPE_P (TREE_TYPE (t)))
2994	{
2995	  error ("invalid operand to plus/minus, type is a pointer");
2996	  return t;
2997	}
2998      CHECK_OP (0, "invalid operand to binary operator");
2999      CHECK_OP (1, "invalid operand to binary operator");
3000      break;
3001
3002    case POINTER_PLUS_EXPR:
3003      /* Check to make sure the first operand is a pointer or reference type. */
3004      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3005	{
3006	  error ("invalid operand to pointer plus, first operand is not a pointer");
3007	  return t;
3008	}
3009      /* Check to make sure the second operand is a ptrofftype.  */
3010      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3011	{
3012	  error ("invalid operand to pointer plus, second operand is not an "
3013		 "integer type of appropriate width");
3014	  return t;
3015	}
3016      /* FALLTHROUGH */
3017    case LT_EXPR:
3018    case LE_EXPR:
3019    case GT_EXPR:
3020    case GE_EXPR:
3021    case EQ_EXPR:
3022    case NE_EXPR:
3023    case UNORDERED_EXPR:
3024    case ORDERED_EXPR:
3025    case UNLT_EXPR:
3026    case UNLE_EXPR:
3027    case UNGT_EXPR:
3028    case UNGE_EXPR:
3029    case UNEQ_EXPR:
3030    case LTGT_EXPR:
3031    case MULT_EXPR:
3032    case TRUNC_DIV_EXPR:
3033    case CEIL_DIV_EXPR:
3034    case FLOOR_DIV_EXPR:
3035    case ROUND_DIV_EXPR:
3036    case TRUNC_MOD_EXPR:
3037    case CEIL_MOD_EXPR:
3038    case FLOOR_MOD_EXPR:
3039    case ROUND_MOD_EXPR:
3040    case RDIV_EXPR:
3041    case EXACT_DIV_EXPR:
3042    case MIN_EXPR:
3043    case MAX_EXPR:
3044    case LSHIFT_EXPR:
3045    case RSHIFT_EXPR:
3046    case LROTATE_EXPR:
3047    case RROTATE_EXPR:
3048    case BIT_IOR_EXPR:
3049    case BIT_XOR_EXPR:
3050    case BIT_AND_EXPR:
3051      CHECK_OP (0, "invalid operand to binary operator");
3052      CHECK_OP (1, "invalid operand to binary operator");
3053      break;
3054
3055    case CONSTRUCTOR:
3056      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3057	*walk_subtrees = 0;
3058      break;
3059
3060    case CASE_LABEL_EXPR:
3061      if (CASE_CHAIN (t))
3062	{
3063	  error ("invalid CASE_CHAIN");
3064	  return t;
3065	}
3066      break;
3067
3068    default:
3069      break;
3070    }
3071  return NULL;
3072
3073#undef CHECK_OP
3074}
3075
3076
3077/* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3078   Returns true if there is an error, otherwise false.  */
3079
3080static bool
3081verify_types_in_gimple_min_lval (tree expr)
3082{
3083  tree op;
3084
3085  if (is_gimple_id (expr))
3086    return false;
3087
3088  if (TREE_CODE (expr) != TARGET_MEM_REF
3089      && TREE_CODE (expr) != MEM_REF)
3090    {
3091      error ("invalid expression for min lvalue");
3092      return true;
3093    }
3094
3095  /* TARGET_MEM_REFs are strange beasts.  */
3096  if (TREE_CODE (expr) == TARGET_MEM_REF)
3097    return false;
3098
3099  op = TREE_OPERAND (expr, 0);
3100  if (!is_gimple_val (op))
3101    {
3102      error ("invalid operand in indirect reference");
3103      debug_generic_stmt (op);
3104      return true;
3105    }
3106  /* Memory references now generally can involve a value conversion.  */
3107
3108  return false;
3109}
3110
3111/* Verify if EXPR is a valid GIMPLE reference expression.  If
3112   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
3113   if there is an error, otherwise false.  */
3114
3115static bool
3116verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3117{
3118  while (handled_component_p (expr))
3119    {
3120      tree op = TREE_OPERAND (expr, 0);
3121
3122      if (TREE_CODE (expr) == ARRAY_REF
3123	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
3124	{
3125	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
3126	      || (TREE_OPERAND (expr, 2)
3127		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
3128	      || (TREE_OPERAND (expr, 3)
3129		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
3130	    {
3131	      error ("invalid operands to array reference");
3132	      debug_generic_stmt (expr);
3133	      return true;
3134	    }
3135	}
3136
3137      /* Verify if the reference array element types are compatible.  */
3138      if (TREE_CODE (expr) == ARRAY_REF
3139	  && !useless_type_conversion_p (TREE_TYPE (expr),
3140					 TREE_TYPE (TREE_TYPE (op))))
3141	{
3142	  error ("type mismatch in array reference");
3143	  debug_generic_stmt (TREE_TYPE (expr));
3144	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3145	  return true;
3146	}
3147      if (TREE_CODE (expr) == ARRAY_RANGE_REF
3148	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3149					 TREE_TYPE (TREE_TYPE (op))))
3150	{
3151	  error ("type mismatch in array range reference");
3152	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3153	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3154	  return true;
3155	}
3156
3157      if ((TREE_CODE (expr) == REALPART_EXPR
3158	   || TREE_CODE (expr) == IMAGPART_EXPR)
3159	  && !useless_type_conversion_p (TREE_TYPE (expr),
3160					 TREE_TYPE (TREE_TYPE (op))))
3161	{
3162	  error ("type mismatch in real/imagpart reference");
3163	  debug_generic_stmt (TREE_TYPE (expr));
3164	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3165	  return true;
3166	}
3167
3168      if (TREE_CODE (expr) == COMPONENT_REF
3169	  && !useless_type_conversion_p (TREE_TYPE (expr),
3170					 TREE_TYPE (TREE_OPERAND (expr, 1))))
3171	{
3172	  error ("type mismatch in component reference");
3173	  debug_generic_stmt (TREE_TYPE (expr));
3174	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3175	  return true;
3176	}
3177
3178      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3179	{
3180	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3181	     that their operand is not an SSA name or an invariant when
3182	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify; gross mismatches at
	     most invoke undefined behavior.  */
3185	  if (require_lvalue
3186	      && (TREE_CODE (op) == SSA_NAME
3187		  || is_gimple_min_invariant (op)))
3188	    {
3189	      error ("conversion of an SSA_NAME on the left hand side");
3190	      debug_generic_stmt (expr);
3191	      return true;
3192	    }
3193	  else if (TREE_CODE (op) == SSA_NAME
3194		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3195	    {
3196	      error ("conversion of register to a different size");
3197	      debug_generic_stmt (expr);
3198	      return true;
3199	    }
3200	  else if (!handled_component_p (op))
3201	    return false;
3202	}
3203
3204      expr = op;
3205    }
3206
3207  if (TREE_CODE (expr) == MEM_REF)
3208    {
3209      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3210	{
3211	  error ("invalid address operand in MEM_REF");
3212	  debug_generic_stmt (expr);
3213	  return true;
3214	}
3215      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3216	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3217	{
3218	  error ("invalid offset operand in MEM_REF");
3219	  debug_generic_stmt (expr);
3220	  return true;
3221	}
3222    }
3223  else if (TREE_CODE (expr) == TARGET_MEM_REF)
3224    {
3225      if (!TMR_BASE (expr)
3226	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3227	{
3228	  error ("invalid address operand in TARGET_MEM_REF");
3229	  return true;
3230	}
3231      if (!TMR_OFFSET (expr)
3232	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3233	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3234	{
3235	  error ("invalid offset operand in TARGET_MEM_REF");
3236	  debug_generic_stmt (expr);
3237	  return true;
3238	}
3239    }
3240
3241  return ((require_lvalue || !is_gimple_min_invariant (expr))
3242	  && verify_types_in_gimple_min_lval (expr));
3243}
3244
/* Returns true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
   list of pointer-to types that is trivially convertible to DEST, or if
   SRC_OBJ has no recorded pointer-to types at all.  */
3247
3248static bool
3249one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3250{
3251  tree src;
3252
3253  if (!TYPE_POINTER_TO (src_obj))
3254    return true;
3255
3256  for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3257    if (useless_type_conversion_p (dest, src))
3258      return true;
3259
3260  return false;
3261}
3262
3263/* Return true if TYPE1 is a fixed-point type and if conversions to and
3264   from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
3265
3266static bool
3267valid_fixed_convert_types_p (tree type1, tree type2)
3268{
3269  return (FIXED_POINT_TYPE_P (type1)
3270	  && (INTEGRAL_TYPE_P (type2)
3271	      || SCALAR_FLOAT_TYPE_P (type2)
3272	      || FIXED_POINT_TYPE_P (type2)));
3273}
3274
3275/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
3276   is a problem, otherwise false.  */
3277
3278static bool
3279verify_gimple_call (gcall *stmt)
3280{
3281  tree fn = gimple_call_fn (stmt);
3282  tree fntype, fndecl;
3283  unsigned i;
3284
3285  if (gimple_call_internal_p (stmt))
3286    {
3287      if (fn)
3288	{
3289	  error ("gimple call has two targets");
3290	  debug_generic_stmt (fn);
3291	  return true;
3292	}
3293    }
3294  else
3295    {
3296      if (!fn)
3297	{
3298	  error ("gimple call has no target");
3299	  return true;
3300	}
3301    }
3302
3303  if (fn && !is_gimple_call_addr (fn))
3304    {
3305      error ("invalid function in gimple call");
3306      debug_generic_stmt (fn);
3307      return true;
3308    }
3309
3310  if (fn
3311      && (!POINTER_TYPE_P (TREE_TYPE (fn))
3312	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3313	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3314    {
3315      error ("non-function in gimple call");
3316      return true;
3317    }
3318
3319   fndecl = gimple_call_fndecl (stmt);
3320   if (fndecl
3321       && TREE_CODE (fndecl) == FUNCTION_DECL
3322       && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3323       && !DECL_PURE_P (fndecl)
3324       && !TREE_READONLY (fndecl))
3325     {
3326       error ("invalid pure const state for function");
3327       return true;
3328     }
3329
3330  if (gimple_call_lhs (stmt)
3331      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3332	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3333    {
3334      error ("invalid LHS in gimple call");
3335      return true;
3336    }
3337
3338  if (gimple_call_ctrl_altering_p (stmt)
3339      && gimple_call_lhs (stmt)
3340      && gimple_call_noreturn_p (stmt))
3341    {
3342      error ("LHS in noreturn call");
3343      return true;
3344    }
3345
3346  fntype = gimple_call_fntype (stmt);
3347  if (fntype
3348      && gimple_call_lhs (stmt)
3349      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3350				     TREE_TYPE (fntype))
3351      /* ???  At least C++ misses conversions at assignments from
3352	 void * call results.
3353	 ???  Java is completely off.  Especially with functions
3354	 returning java.lang.Object.
3355	 For now simply allow arbitrary pointer type conversions.  */
3356      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3357	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
3358    {
3359      error ("invalid conversion in gimple call");
3360      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3361      debug_generic_stmt (TREE_TYPE (fntype));
3362      return true;
3363    }
3364
3365  if (gimple_call_chain (stmt)
3366      && !is_gimple_val (gimple_call_chain (stmt)))
3367    {
3368      error ("invalid static chain in gimple call");
3369      debug_generic_stmt (gimple_call_chain (stmt));
3370      return true;
3371    }
3372
3373  /* If there is a static chain argument, the call should either be
3374     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
3375  if (gimple_call_chain (stmt)
3376      && fndecl
3377      && !DECL_STATIC_CHAIN (fndecl))
3378    {
3379      error ("static chain with function that doesn%'t use one");
3380      return true;
3381    }
3382
3383  /* ???  The C frontend passes unpromoted arguments in case it
3384     didn't see a function declaration before the call.  So for now
3385     leave the call arguments mostly unverified.  Once we gimplify
3386     unit-at-a-time we have a chance to fix this.  */
3387
3388  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3389    {
3390      tree arg = gimple_call_arg (stmt, i);
3391      if ((is_gimple_reg_type (TREE_TYPE (arg))
3392	   && !is_gimple_val (arg))
3393	  || (!is_gimple_reg_type (TREE_TYPE (arg))
3394	      && !is_gimple_lvalue (arg)))
3395	{
3396	  error ("invalid argument to gimple call");
3397	  debug_generic_expr (arg);
3398	  return true;
3399	}
3400    }
3401
3402  return false;
3403}
3404
3405/* Verifies the gimple comparison with the result type TYPE and
3406   the operands OP0 and OP1.  */
3407
3408static bool
3409verify_gimple_comparison (tree type, tree op0, tree op1)
3410{
3411  tree op0_type = TREE_TYPE (op0);
3412  tree op1_type = TREE_TYPE (op1);
3413
3414  if (!is_gimple_val (op0) || !is_gimple_val (op1))
3415    {
3416      error ("invalid operands in gimple comparison");
3417      return true;
3418    }
3419
  /* For comparisons we do not record the effective type the
     comparison is carried out in.  Instead we require that either
     the first operand is trivially convertible into the second,
     or the other way around.  Because we special-case pointers
     to void we allow comparisons of pointers with the same mode
     as well.  */
3426  if (!useless_type_conversion_p (op0_type, op1_type)
3427      && !useless_type_conversion_p (op1_type, op0_type)
3428      && (!POINTER_TYPE_P (op0_type)
3429	  || !POINTER_TYPE_P (op1_type)
3430	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3431    {
3432      error ("mismatching comparison operand types");
3433      debug_generic_expr (op0_type);
3434      debug_generic_expr (op1_type);
3435      return true;
3436    }
3437
3438  /* The resulting type of a comparison may be an effective boolean type.  */
3439  if (INTEGRAL_TYPE_P (type)
3440      && (TREE_CODE (type) == BOOLEAN_TYPE
3441	  || TYPE_PRECISION (type) == 1))
3442    {
3443      if (TREE_CODE (op0_type) == VECTOR_TYPE
3444	  || TREE_CODE (op1_type) == VECTOR_TYPE)
3445        {
3446          error ("vector comparison returning a boolean");
3447          debug_generic_expr (op0_type);
3448          debug_generic_expr (op1_type);
3449          return true;
3450        }
3451    }
3452  /* Or an integer vector type with the same size and element count
3453     as the comparison operand types.  */
3454  else if (TREE_CODE (type) == VECTOR_TYPE
3455	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3456    {
3457      if (TREE_CODE (op0_type) != VECTOR_TYPE
3458	  || TREE_CODE (op1_type) != VECTOR_TYPE)
3459        {
3460          error ("non-vector operands in vector comparison");
3461          debug_generic_expr (op0_type);
3462          debug_generic_expr (op1_type);
3463          return true;
3464        }
3465
3466      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3467	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3468	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3469	  /* The result of a vector comparison is of signed
3470	     integral type.  */
3471	  || TYPE_UNSIGNED (TREE_TYPE (type)))
3472        {
3473          error ("invalid vector comparison resulting type");
3474          debug_generic_expr (type);
3475          return true;
3476        }
3477    }
3478  else
3479    {
3480      error ("bogus comparison result type");
3481      debug_generic_expr (type);
3482      return true;
3483    }
3484
3485  return false;
3486}
3487
3488/* Verify a gimple assignment statement STMT with an unary rhs.
3489   Returns true if anything is wrong.  */
3490
3491static bool
3492verify_gimple_assign_unary (gassign *stmt)
3493{
3494  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3495  tree lhs = gimple_assign_lhs (stmt);
3496  tree lhs_type = TREE_TYPE (lhs);
3497  tree rhs1 = gimple_assign_rhs1 (stmt);
3498  tree rhs1_type = TREE_TYPE (rhs1);
3499
3500  if (!is_gimple_reg (lhs))
3501    {
3502      error ("non-register as LHS of unary operation");
3503      return true;
3504    }
3505
3506  if (!is_gimple_val (rhs1))
3507    {
3508      error ("invalid operand in unary operation");
3509      return true;
3510    }
3511
3512  /* First handle conversions.  */
3513  switch (rhs_code)
3514    {
3515    CASE_CONVERT:
3516      {
3517	/* Allow conversions from pointer type to integral type only if
3518	   there is no sign or zero extension involved.
	   For targets where the precision of ptrofftype doesn't match that
3520	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
3521	if ((POINTER_TYPE_P (lhs_type)
3522	     && INTEGRAL_TYPE_P (rhs1_type))
3523	    || (POINTER_TYPE_P (rhs1_type)
3524		&& INTEGRAL_TYPE_P (lhs_type)
3525		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3526		    || ptrofftype_p (sizetype))))
3527	  return false;
3528
3529	/* Allow conversion from integral to offset type and vice versa.  */
3530	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3531	     && INTEGRAL_TYPE_P (rhs1_type))
3532	    || (INTEGRAL_TYPE_P (lhs_type)
3533		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
3534	  return false;
3535
3536	/* Otherwise assert we are converting between types of the
3537	   same kind.  */
3538	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3539	  {
3540	    error ("invalid types in nop conversion");
3541	    debug_generic_expr (lhs_type);
3542	    debug_generic_expr (rhs1_type);
3543	    return true;
3544	  }
3545
3546	return false;
3547      }
3548
3549    case ADDR_SPACE_CONVERT_EXPR:
3550      {
3551	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3552	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3553		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3554	  {
3555	    error ("invalid types in address space conversion");
3556	    debug_generic_expr (lhs_type);
3557	    debug_generic_expr (rhs1_type);
3558	    return true;
3559	  }
3560
3561	return false;
3562      }
3563
3564    case FIXED_CONVERT_EXPR:
3565      {
3566	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3567	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3568	  {
3569	    error ("invalid types in fixed-point conversion");
3570	    debug_generic_expr (lhs_type);
3571	    debug_generic_expr (rhs1_type);
3572	    return true;
3573	  }
3574
3575	return false;
3576      }
3577
3578    case FLOAT_EXPR:
3579      {
3580	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3581	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3582	        || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3583	  {
3584	    error ("invalid types in conversion to floating point");
3585	    debug_generic_expr (lhs_type);
3586	    debug_generic_expr (rhs1_type);
3587	    return true;
3588	  }
3589
3590        return false;
3591      }
3592
3593    case FIX_TRUNC_EXPR:
3594      {
3595        if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3596            && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3597                || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3598	  {
3599	    error ("invalid types in conversion to integer");
3600	    debug_generic_expr (lhs_type);
3601	    debug_generic_expr (rhs1_type);
3602	    return true;
3603	  }
3604
3605        return false;
3606      }
3607    case REDUC_MAX_EXPR:
3608    case REDUC_MIN_EXPR:
3609    case REDUC_PLUS_EXPR:
3610      if (!VECTOR_TYPE_P (rhs1_type)
3611	  || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3612        {
3613	  error ("reduction should convert from vector to element type");
3614	  debug_generic_expr (lhs_type);
3615	  debug_generic_expr (rhs1_type);
3616	  return true;
3617	}
3618      return false;
3619
3620    case VEC_UNPACK_HI_EXPR:
3621    case VEC_UNPACK_LO_EXPR:
3622    case VEC_UNPACK_FLOAT_HI_EXPR:
3623    case VEC_UNPACK_FLOAT_LO_EXPR:
3624      /* FIXME.  */
3625      return false;
3626
3627    case NEGATE_EXPR:
3628    case ABS_EXPR:
3629    case BIT_NOT_EXPR:
3630    case PAREN_EXPR:
3631    case CONJ_EXPR:
3632      break;
3633
3634    default:
3635      gcc_unreachable ();
3636    }
3637
3638  /* For the remaining codes assert there is no conversion involved.  */
3639  if (!useless_type_conversion_p (lhs_type, rhs1_type))
3640    {
3641      error ("non-trivial conversion in unary operation");
3642      debug_generic_expr (lhs_type);
3643      debug_generic_expr (rhs1_type);
3644      return true;
3645    }
3646
3647  return false;
3648}
3649
3650/* Verify a gimple assignment statement STMT with a binary rhs.
3651   Returns true if anything is wrong.  */
3652
3653static bool
3654verify_gimple_assign_binary (gassign *stmt)
3655{
3656  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3657  tree lhs = gimple_assign_lhs (stmt);
3658  tree lhs_type = TREE_TYPE (lhs);
3659  tree rhs1 = gimple_assign_rhs1 (stmt);
3660  tree rhs1_type = TREE_TYPE (rhs1);
3661  tree rhs2 = gimple_assign_rhs2 (stmt);
3662  tree rhs2_type = TREE_TYPE (rhs2);
3663
3664  if (!is_gimple_reg (lhs))
3665    {
3666      error ("non-register as LHS of binary operation");
3667      return true;
3668    }
3669
3670  if (!is_gimple_val (rhs1)
3671      || !is_gimple_val (rhs2))
3672    {
3673      error ("invalid operands in binary operation");
3674      return true;
3675    }
3676
3677  /* First handle operations that involve different types.  */
3678  switch (rhs_code)
3679    {
3680    case COMPLEX_EXPR:
3681      {
3682	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3683	    || !(INTEGRAL_TYPE_P (rhs1_type)
3684	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
3685	    || !(INTEGRAL_TYPE_P (rhs2_type)
3686	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3687	  {
3688	    error ("type mismatch in complex expression");
3689	    debug_generic_expr (lhs_type);
3690	    debug_generic_expr (rhs1_type);
3691	    debug_generic_expr (rhs2_type);
3692	    return true;
3693	  }
3694
3695	return false;
3696      }
3697
3698    case LSHIFT_EXPR:
3699    case RSHIFT_EXPR:
3700    case LROTATE_EXPR:
3701    case RROTATE_EXPR:
3702      {
3703	/* Shifts and rotates are OK on integral types, fixed-point
3704	   types and integer vector types.  */
3705	if ((!INTEGRAL_TYPE_P (rhs1_type)
3706	     && !FIXED_POINT_TYPE_P (rhs1_type)
3707	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3708		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3709	    || (!INTEGRAL_TYPE_P (rhs2_type)
3710		/* Vector shifts of vectors are also ok.  */
3711		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3712		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3713		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
3714		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3715	    || !useless_type_conversion_p (lhs_type, rhs1_type))
3716	  {
3717	    error ("type mismatch in shift expression");
3718	    debug_generic_expr (lhs_type);
3719	    debug_generic_expr (rhs1_type);
3720	    debug_generic_expr (rhs2_type);
3721	    return true;
3722	  }
3723
3724	return false;
3725      }
3726
3727    case WIDEN_LSHIFT_EXPR:
3728      {
3729        if (!INTEGRAL_TYPE_P (lhs_type)
3730            || !INTEGRAL_TYPE_P (rhs1_type)
3731            || TREE_CODE (rhs2) != INTEGER_CST
3732            || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3733          {
3734            error ("type mismatch in widening shift expression");
3735            debug_generic_expr (lhs_type);
3736            debug_generic_expr (rhs1_type);
3737            debug_generic_expr (rhs2_type);
3738            return true;
3739          }
3740
3741        return false;
3742      }
3743
3744    case VEC_WIDEN_LSHIFT_HI_EXPR:
3745    case VEC_WIDEN_LSHIFT_LO_EXPR:
3746      {
3747        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3748            || TREE_CODE (lhs_type) != VECTOR_TYPE
3749            || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3750            || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3751            || TREE_CODE (rhs2) != INTEGER_CST
3752            || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3753                > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3754          {
3755            error ("type mismatch in widening vector shift expression");
3756            debug_generic_expr (lhs_type);
3757            debug_generic_expr (rhs1_type);
3758            debug_generic_expr (rhs2_type);
3759            return true;
3760          }
3761
3762        return false;
3763      }
3764
3765    case PLUS_EXPR:
3766    case MINUS_EXPR:
3767      {
3768	tree lhs_etype = lhs_type;
3769	tree rhs1_etype = rhs1_type;
3770	tree rhs2_etype = rhs2_type;
3771	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3772	  {
3773	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3774		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
3775	      {
3776		error ("invalid non-vector operands to vector-valued plus");
3777		return true;
3778	      }
3779	    lhs_etype = TREE_TYPE (lhs_type);
3780	    rhs1_etype = TREE_TYPE (rhs1_type);
3781	    rhs2_etype = TREE_TYPE (rhs2_type);
3782	  }
3783	if (POINTER_TYPE_P (lhs_etype)
3784	    || POINTER_TYPE_P (rhs1_etype)
3785	    || POINTER_TYPE_P (rhs2_etype))
3786	  {
3787	    error ("invalid (pointer) operands to plus/minus");
3788	    return true;
3789	  }
3790
3791	/* Continue with generic binary expression handling.  */
3792	break;
3793      }
3794
3795    case POINTER_PLUS_EXPR:
3796      {
3797	if (!POINTER_TYPE_P (rhs1_type)
3798	    || !useless_type_conversion_p (lhs_type, rhs1_type)
3799	    || !ptrofftype_p (rhs2_type))
3800	  {
3801	    error ("type mismatch in pointer plus expression");
3802	    debug_generic_stmt (lhs_type);
3803	    debug_generic_stmt (rhs1_type);
3804	    debug_generic_stmt (rhs2_type);
3805	    return true;
3806	  }
3807
3808	return false;
3809      }
3810
3811    case TRUTH_ANDIF_EXPR:
3812    case TRUTH_ORIF_EXPR:
3813    case TRUTH_AND_EXPR:
3814    case TRUTH_OR_EXPR:
3815    case TRUTH_XOR_EXPR:
3816
3817      gcc_unreachable ();
3818
3819    case LT_EXPR:
3820    case LE_EXPR:
3821    case GT_EXPR:
3822    case GE_EXPR:
3823    case EQ_EXPR:
3824    case NE_EXPR:
3825    case UNORDERED_EXPR:
3826    case ORDERED_EXPR:
3827    case UNLT_EXPR:
3828    case UNLE_EXPR:
3829    case UNGT_EXPR:
3830    case UNGE_EXPR:
3831    case UNEQ_EXPR:
3832    case LTGT_EXPR:
3833      /* Comparisons are also binary, but the result type is not
3834	 connected to the operand types.  */
3835      return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3836
3837    case WIDEN_MULT_EXPR:
3838      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3839	return true;
3840      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3841	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3842
3843    case WIDEN_SUM_EXPR:
3844    case VEC_WIDEN_MULT_HI_EXPR:
3845    case VEC_WIDEN_MULT_LO_EXPR:
3846    case VEC_WIDEN_MULT_EVEN_EXPR:
3847    case VEC_WIDEN_MULT_ODD_EXPR:
3848    case VEC_PACK_TRUNC_EXPR:
3849    case VEC_PACK_SAT_EXPR:
3850    case VEC_PACK_FIX_TRUNC_EXPR:
3851      /* FIXME.  */
3852      return false;
3853
3854    case MULT_EXPR:
3855    case MULT_HIGHPART_EXPR:
3856    case TRUNC_DIV_EXPR:
3857    case CEIL_DIV_EXPR:
3858    case FLOOR_DIV_EXPR:
3859    case ROUND_DIV_EXPR:
3860    case TRUNC_MOD_EXPR:
3861    case CEIL_MOD_EXPR:
3862    case FLOOR_MOD_EXPR:
3863    case ROUND_MOD_EXPR:
3864    case RDIV_EXPR:
3865    case EXACT_DIV_EXPR:
3866    case MIN_EXPR:
3867    case MAX_EXPR:
3868    case BIT_IOR_EXPR:
3869    case BIT_XOR_EXPR:
3870    case BIT_AND_EXPR:
3871      /* Continue with generic binary expression handling.  */
3872      break;
3873
3874    default:
3875      gcc_unreachable ();
3876    }
3877
3878  if (!useless_type_conversion_p (lhs_type, rhs1_type)
3879      || !useless_type_conversion_p (lhs_type, rhs2_type))
3880    {
3881      error ("type mismatch in binary expression");
3882      debug_generic_stmt (lhs_type);
3883      debug_generic_stmt (rhs1_type);
3884      debug_generic_stmt (rhs2_type);
3885      return true;
3886    }
3887
3888  return false;
3889}
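
/* An illustrative sketch of the POINTER_PLUS_EXPR rule above (example
   IL with invented names): C pointer arithmetic such as q = p + 4 for
   a char *p becomes

     q_2 = p_1 + 4;

   a POINTER_PLUS_EXPR whose rhs1 is the pointer, whose result type
   must trivially match rhs1's type, and whose rhs2 is a byte offset
   of a type satisfying ptrofftype_p (e.g. sizetype).  Adding two
   pointers with PLUS_EXPR is rejected by the PLUS_EXPR/MINUS_EXPR
   case instead.  */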
3890
3891/* Verify a gimple assignment statement STMT with a ternary rhs.
3892   Returns true if anything is wrong.  */
3893
3894static bool
3895verify_gimple_assign_ternary (gassign *stmt)
3896{
3897  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3898  tree lhs = gimple_assign_lhs (stmt);
3899  tree lhs_type = TREE_TYPE (lhs);
3900  tree rhs1 = gimple_assign_rhs1 (stmt);
3901  tree rhs1_type = TREE_TYPE (rhs1);
3902  tree rhs2 = gimple_assign_rhs2 (stmt);
3903  tree rhs2_type = TREE_TYPE (rhs2);
3904  tree rhs3 = gimple_assign_rhs3 (stmt);
3905  tree rhs3_type = TREE_TYPE (rhs3);
3906
3907  if (!is_gimple_reg (lhs))
3908    {
3909      error ("non-register as LHS of ternary operation");
3910      return true;
3911    }
3912
3913  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3914       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3915      || !is_gimple_val (rhs2)
3916      || !is_gimple_val (rhs3))
3917    {
3918      error ("invalid operands in ternary operation");
3919      return true;
3920    }
3921
3922  /* First handle operations that involve different types.  */
3923  switch (rhs_code)
3924    {
3925    case WIDEN_MULT_PLUS_EXPR:
3926    case WIDEN_MULT_MINUS_EXPR:
3927      if ((!INTEGRAL_TYPE_P (rhs1_type)
3928	   && !FIXED_POINT_TYPE_P (rhs1_type))
3929	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
3930	  || !useless_type_conversion_p (lhs_type, rhs3_type)
3931	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3932	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3933	{
3934	  error ("type mismatch in widening multiply-accumulate expression");
3935	  debug_generic_expr (lhs_type);
3936	  debug_generic_expr (rhs1_type);
3937	  debug_generic_expr (rhs2_type);
3938	  debug_generic_expr (rhs3_type);
3939	  return true;
3940	}
3941      break;
3942
3943    case FMA_EXPR:
3944      if (!useless_type_conversion_p (lhs_type, rhs1_type)
3945	  || !useless_type_conversion_p (lhs_type, rhs2_type)
3946	  || !useless_type_conversion_p (lhs_type, rhs3_type))
3947	{
3948	  error ("type mismatch in fused multiply-add expression");
3949	  debug_generic_expr (lhs_type);
3950	  debug_generic_expr (rhs1_type);
3951	  debug_generic_expr (rhs2_type);
3952	  debug_generic_expr (rhs3_type);
3953	  return true;
3954	}
3955      break;
3956
3957    case COND_EXPR:
3958    case VEC_COND_EXPR:
3959      if (!useless_type_conversion_p (lhs_type, rhs2_type)
3960	  || !useless_type_conversion_p (lhs_type, rhs3_type))
3961	{
3962	  error ("type mismatch in conditional expression");
3963	  debug_generic_expr (lhs_type);
3964	  debug_generic_expr (rhs2_type);
3965	  debug_generic_expr (rhs3_type);
3966	  return true;
3967	}
3968      break;
3969
3970    case VEC_PERM_EXPR:
3971      if (!useless_type_conversion_p (lhs_type, rhs1_type)
3972	  || !useless_type_conversion_p (lhs_type, rhs2_type))
3973	{
3974	  error ("type mismatch in vector permute expression");
3975	  debug_generic_expr (lhs_type);
3976	  debug_generic_expr (rhs1_type);
3977	  debug_generic_expr (rhs2_type);
3978	  debug_generic_expr (rhs3_type);
3979	  return true;
3980	}
3981
3982      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3983	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
3984	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3985	{
3986	  error ("vector types expected in vector permute expression");
3987	  debug_generic_expr (lhs_type);
3988	  debug_generic_expr (rhs1_type);
3989	  debug_generic_expr (rhs2_type);
3990	  debug_generic_expr (rhs3_type);
3991	  return true;
3992	}
3993
3994      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3995	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
3996	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
3997	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
3998	     != TYPE_VECTOR_SUBPARTS (lhs_type))
3999	{
4000	  error ("vectors with differing element counts found "
4001		 "in vector permute expression");
4002	  debug_generic_expr (lhs_type);
4003	  debug_generic_expr (rhs1_type);
4004	  debug_generic_expr (rhs2_type);
4005	  debug_generic_expr (rhs3_type);
4006	  return true;
4007	}
4008
4009      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4010	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4011	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4012	{
4013	  error ("invalid mask type in vector permute expression");
4014	  debug_generic_expr (lhs_type);
4015	  debug_generic_expr (rhs1_type);
4016	  debug_generic_expr (rhs2_type);
4017	  debug_generic_expr (rhs3_type);
4018	  return true;
4019	}
4020
4021      return false;
4022
4023    case SAD_EXPR:
4024      if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4025	  || !useless_type_conversion_p (lhs_type, rhs3_type)
4026	  || 2 * GET_MODE_BITSIZE (GET_MODE_INNER
4027				     (TYPE_MODE (TREE_TYPE (rhs1_type))))
4028	       > GET_MODE_BITSIZE (GET_MODE_INNER
4029				     (TYPE_MODE (TREE_TYPE (lhs_type)))))
4030	{
4031	  error ("type mismatch in sad expression");
4032	  debug_generic_expr (lhs_type);
4033	  debug_generic_expr (rhs1_type);
4034	  debug_generic_expr (rhs2_type);
4035	  debug_generic_expr (rhs3_type);
4036	  return true;
4037	}
4038
4039      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4040	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
4041	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4042	{
4043	  error ("vector types expected in sad expression");
4044	  debug_generic_expr (lhs_type);
4045	  debug_generic_expr (rhs1_type);
4046	  debug_generic_expr (rhs2_type);
4047	  debug_generic_expr (rhs3_type);
4048	  return true;
4049	}
4050
4051      return false;
4052
4053    case DOT_PROD_EXPR:
4054    case REALIGN_LOAD_EXPR:
4055      /* FIXME.  */
4056      return false;
4057
4058    default:
4059      gcc_unreachable ();
4060    }
4061  return false;
4062}
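
/* A sketch of a well-formed VEC_PERM_EXPR per the checks above
   (illustrative, with invented names): for v4si a_1, b_2 and a v4si
   mask m_3,

     c_4 = VEC_PERM_EXPR <a_1, b_2, m_3>;

   requires all four vectors to have the same number of subparts, the
   data operands and the result to have trivially convertible types,
   and the mask elements to be integers exactly as wide as the data
   elements.  */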
4063
4064/* Verify a gimple assignment statement STMT with a single rhs.
4065   Returns true if anything is wrong.  */
4066
4067static bool
4068verify_gimple_assign_single (gassign *stmt)
4069{
4070  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4071  tree lhs = gimple_assign_lhs (stmt);
4072  tree lhs_type = TREE_TYPE (lhs);
4073  tree rhs1 = gimple_assign_rhs1 (stmt);
4074  tree rhs1_type = TREE_TYPE (rhs1);
4075  bool res = false;
4076
4077  if (!useless_type_conversion_p (lhs_type, rhs1_type))
4078    {
4079      error ("non-trivial conversion at assignment");
4080      debug_generic_expr (lhs_type);
4081      debug_generic_expr (rhs1_type);
4082      return true;
4083    }
4084
4085  if (gimple_clobber_p (stmt)
4086      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4087    {
4088      error ("non-decl/MEM_REF LHS in clobber statement");
4089      debug_generic_expr (lhs);
4090      return true;
4091    }
4092
4093  if (handled_component_p (lhs)
4094      || TREE_CODE (lhs) == MEM_REF
4095      || TREE_CODE (lhs) == TARGET_MEM_REF)
4096    res |= verify_types_in_gimple_reference (lhs, true);
4097
4098  /* Special codes we cannot handle via their class.  */
4099  switch (rhs_code)
4100    {
4101    case ADDR_EXPR:
4102      {
4103	tree op = TREE_OPERAND (rhs1, 0);
4104	if (!is_gimple_addressable (op))
4105	  {
4106	    error ("invalid operand in unary expression");
4107	    return true;
4108	  }
4109
4110	/* Technically there is no longer a need for matching types, but
4111	   gimple hygiene asks for this check.  In LTO we can end up
4112	   combining incompatible units and thus end up with addresses
4113	   of globals that change their type to a common one.  */
4114	if (!in_lto_p
4115	    && !types_compatible_p (TREE_TYPE (op),
4116				    TREE_TYPE (TREE_TYPE (rhs1)))
4117	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4118							  TREE_TYPE (op)))
4119	  {
4120	    error ("type mismatch in address expression");
4121	    debug_generic_stmt (TREE_TYPE (rhs1));
4122	    debug_generic_stmt (TREE_TYPE (op));
4123	    return true;
4124	  }
4125
4126	return verify_types_in_gimple_reference (op, true);
4127      }
4128
4129    /* tcc_reference  */
4130    case INDIRECT_REF:
4131      error ("INDIRECT_REF in gimple IL");
4132      return true;
4133
4134    case COMPONENT_REF:
4135    case BIT_FIELD_REF:
4136    case ARRAY_REF:
4137    case ARRAY_RANGE_REF:
4138    case VIEW_CONVERT_EXPR:
4139    case REALPART_EXPR:
4140    case IMAGPART_EXPR:
4141    case TARGET_MEM_REF:
4142    case MEM_REF:
4143      if (!is_gimple_reg (lhs)
4144	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4145	{
4146	  error ("invalid rhs for gimple memory store");
4147	  debug_generic_stmt (lhs);
4148	  debug_generic_stmt (rhs1);
4149	  return true;
4150	}
4151      return res || verify_types_in_gimple_reference (rhs1, false);
4152
4153    /* tcc_constant  */
4154    case SSA_NAME:
4155    case INTEGER_CST:
4156    case REAL_CST:
4157    case FIXED_CST:
4158    case COMPLEX_CST:
4159    case VECTOR_CST:
4160    case STRING_CST:
4161      return res;
4162
4163    /* tcc_declaration  */
4164    case CONST_DECL:
4165      return res;
4166    case VAR_DECL:
4167    case PARM_DECL:
4168      if (!is_gimple_reg (lhs)
4169	  && !is_gimple_reg (rhs1)
4170	  && is_gimple_reg_type (TREE_TYPE (lhs)))
4171	{
4172	  error ("invalid rhs for gimple memory store");
4173	  debug_generic_stmt (lhs);
4174	  debug_generic_stmt (rhs1);
4175	  return true;
4176	}
4177      return res;
4178
4179    case CONSTRUCTOR:
4180      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4181	{
4182	  unsigned int i;
4183	  tree elt_i, elt_v, elt_t = NULL_TREE;
4184
4185	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
4186	    return res;
4187	  /* For vector CONSTRUCTORs we require that either it is an empty
4188	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4189	     (then the element count must be correct to cover the whole
4190	     outer vector and the index must be NULL on all elements), or it
4191	     is a CONSTRUCTOR of scalar elements, where as an exception we
4192	     allow a smaller number of elements (assuming zero filling) and
4193	     consecutive indexes as compared to NULL indexes (such
4194	     CONSTRUCTORs can appear in the IL from FEs).  */
4195	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4196	    {
4197	      if (elt_t == NULL_TREE)
4198		{
4199		  elt_t = TREE_TYPE (elt_v);
4200		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
4201		    {
4203		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4204						      TREE_TYPE (elt_t)))
4205			{
4206			  error ("incorrect type of vector CONSTRUCTOR"
4207				 " elements");
4208			  debug_generic_stmt (rhs1);
4209			  return true;
4210			}
4211		      else if (CONSTRUCTOR_NELTS (rhs1)
4212			       * TYPE_VECTOR_SUBPARTS (elt_t)
4213			       != TYPE_VECTOR_SUBPARTS (rhs1_type))
4214			{
4215			  error ("incorrect number of vector CONSTRUCTOR"
4216				 " elements");
4217			  debug_generic_stmt (rhs1);
4218			  return true;
4219			}
4220		    }
4221		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4222						       elt_t))
4223		    {
4224		      error ("incorrect type of vector CONSTRUCTOR elements");
4225		      debug_generic_stmt (rhs1);
4226		      return true;
4227		    }
4228		  else if (CONSTRUCTOR_NELTS (rhs1)
4229			   > TYPE_VECTOR_SUBPARTS (rhs1_type))
4230		    {
4231		      error ("incorrect number of vector CONSTRUCTOR elements");
4232		      debug_generic_stmt (rhs1);
4233		      return true;
4234		    }
4235		}
4236	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4237		{
4238		  error ("incorrect type of vector CONSTRUCTOR elements");
4239		  debug_generic_stmt (rhs1);
4240		  return true;
4241		}
4242	      if (elt_i != NULL_TREE
4243		  && (TREE_CODE (elt_t) == VECTOR_TYPE
4244		      || TREE_CODE (elt_i) != INTEGER_CST
4245		      || compare_tree_int (elt_i, i) != 0))
4246		{
4247		  error ("vector CONSTRUCTOR with non-NULL element index");
4248		  debug_generic_stmt (rhs1);
4249		  return true;
4250		}
4251	      if (!is_gimple_val (elt_v))
4252		{
4253		  error ("vector CONSTRUCTOR element is not a GIMPLE value");
4254		  debug_generic_stmt (rhs1);
4255		  return true;
4256		}
4257	    }
4258	}
4259      else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4260	{
4261	  error ("non-vector CONSTRUCTOR with elements");
4262	  debug_generic_stmt (rhs1);
4263	  return true;
4264	}
4265      return res;
4266    case OBJ_TYPE_REF:
4267    case ASSERT_EXPR:
4268    case WITH_SIZE_EXPR:
4269      /* FIXME.  */
4270      return res;
4271
4272    default:;
4273    }
4274
4275  return res;
4276}
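
/* A sketch of the vector CONSTRUCTOR forms accepted above
   (illustrative example IL): for a v4si destination x_1,

     x_1 = {};                     (empty, i.e. a zero vector)
     x_1 = {a_2, b_3, c_4, d_5};   (scalar elements, indexes NULL or
                                    consecutive from zero)
     x_1 = {v_6, v_7};             (two v2si halves covering the v4si)

   are all fine, whereas mismatched element types, too many elements,
   or explicit out-of-order indexes are diagnosed.  */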
4277
4278/* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
4279   is a problem, otherwise false.  */
4280
4281static bool
4282verify_gimple_assign (gassign *stmt)
4283{
4284  switch (gimple_assign_rhs_class (stmt))
4285    {
4286    case GIMPLE_SINGLE_RHS:
4287      return verify_gimple_assign_single (stmt);
4288
4289    case GIMPLE_UNARY_RHS:
4290      return verify_gimple_assign_unary (stmt);
4291
4292    case GIMPLE_BINARY_RHS:
4293      return verify_gimple_assign_binary (stmt);
4294
4295    case GIMPLE_TERNARY_RHS:
4296      return verify_gimple_assign_ternary (stmt);
4297
4298    default:
4299      gcc_unreachable ();
4300    }
4301}
4302
4303/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
4304   is a problem, otherwise false.  */
4305
4306static bool
4307verify_gimple_return (greturn *stmt)
4308{
4309  tree op = gimple_return_retval (stmt);
4310  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4311
4312	  /* We cannot test for the presence of return values as we do not
4313	     fix up missing return values from the original source.  */
4314  if (op == NULL)
4315    return false;
4316
4317  if (!is_gimple_val (op)
4318      && TREE_CODE (op) != RESULT_DECL)
4319    {
4320      error ("invalid operand in return statement");
4321      debug_generic_stmt (op);
4322      return true;
4323    }
4324
4325  if ((TREE_CODE (op) == RESULT_DECL
4326       && DECL_BY_REFERENCE (op))
4327      || (TREE_CODE (op) == SSA_NAME
4328	  && SSA_NAME_VAR (op)
4329	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4330	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4331    op = TREE_TYPE (op);
4332
4333  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4334    {
4335      error ("invalid conversion in return statement");
4336      debug_generic_stmt (restype);
4337      debug_generic_stmt (TREE_TYPE (op));
4338      return true;
4339    }
4340
4341  return false;
4342}
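
/* Illustrative example (invented IL): in a function declared to
   return long, the statement

     return i_1;    (with int i_1)

   is diagnosed as an invalid conversion in a return statement; a
   valid lowering is i_2 = (long) i_1; return i_2;  */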
4343
4344
4345/* Verify the contents of a GIMPLE_GOTO STMT.  Returns true when there
4346   is a problem, otherwise false.  */
4347
4348static bool
4349verify_gimple_goto (ggoto *stmt)
4350{
4351  tree dest = gimple_goto_dest (stmt);
4352
4353  /* ???  We have two canonical forms of direct goto destinations, a
4354     bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL.  */
4355  if (TREE_CODE (dest) != LABEL_DECL
4356      && (!is_gimple_val (dest)
4357	  || !POINTER_TYPE_P (TREE_TYPE (dest))))
4358    {
4359      error ("goto destination is neither a label nor a pointer");
4360      return true;
4361    }
4362
4363  return false;
4364}
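
/* Example: a C computed goto such as "goto *p_1;" produces a
   GIMPLE_GOTO whose destination is a pointer-valued SSA name, which
   the check above accepts alongside the plain LABEL_DECL form.  */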
4365
4366/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
4367   is a problem, otherwise false.  */
4368
4369static bool
4370verify_gimple_switch (gswitch *stmt)
4371{
4372  unsigned int i, n;
4373  tree elt, prev_upper_bound = NULL_TREE;
4374  tree index_type, elt_type = NULL_TREE;
4375
4376  if (!is_gimple_val (gimple_switch_index (stmt)))
4377    {
4378      error ("invalid operand to switch statement");
4379      debug_generic_stmt (gimple_switch_index (stmt));
4380      return true;
4381    }
4382
4383  index_type = TREE_TYPE (gimple_switch_index (stmt));
4384  if (! INTEGRAL_TYPE_P (index_type))
4385    {
4386      error ("non-integral type as switch statement index");
4387      debug_generic_expr (index_type);
4388      return true;
4389    }
4390
4391  elt = gimple_switch_label (stmt, 0);
4392  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4393    {
4394      error ("invalid default case label in switch statement");
4395      debug_generic_expr (elt);
4396      return true;
4397    }
4398
4399  n = gimple_switch_num_labels (stmt);
4400  for (i = 1; i < n; i++)
4401    {
4402      elt = gimple_switch_label (stmt, i);
4403
4404      if (! CASE_LOW (elt))
4405	{
4406	  error ("invalid case label in switch statement");
4407	  debug_generic_expr (elt);
4408	  return true;
4409	}
4410      if (CASE_HIGH (elt)
4411	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4412	{
4413	  error ("invalid case range in switch statement");
4414	  debug_generic_expr (elt);
4415	  return true;
4416	}
4417
4418      if (elt_type)
4419	{
4420	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4421	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4422	    {
4423	      error ("type mismatch for case label in switch statement");
4424	      debug_generic_expr (elt);
4425	      return true;
4426	    }
4427	}
4428      else
4429	{
4430	  elt_type = TREE_TYPE (CASE_LOW (elt));
4431	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4432	    {
4433	      error ("type precision mismatch in switch statement");
4434	      return true;
4435	    }
4436	}
4437
4438      if (prev_upper_bound)
4439	{
4440	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4441	    {
4442	      error ("case labels not sorted in switch statement");
4443	      return true;
4444	    }
4445	}
4446
4447      prev_upper_bound = CASE_HIGH (elt);
4448      if (! prev_upper_bound)
4449	prev_upper_bound = CASE_LOW (elt);
4450    }
4451
4452  return false;
4453}
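
/* A sketch of the switch shape enforced above (illustrative dump
   syntax): label 0 is the default and carries neither CASE_LOW nor
   CASE_HIGH, while the remaining labels have CASE_LOW values of one
   common type, sorted increasingly with non-overlapping ranges, e.g.

     switch (i_1) <default: L3, case 1: L1, case 5 ... 7: L2>  */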
4454
4455/* Verify a gimple debug statement STMT.
4456   Returns true if anything is wrong.  */
4457
4458static bool
4459verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4460{
4461	  /* There isn't much that could be wrong in a gimple debug stmt.  A
4462	     gimple debug bind stmt, for example, maps a tree that is usually
4463	     a VAR_DECL or a PARM_DECL (but could also be some scalarized
4464	     component or member of an aggregate type) to another tree that
4465	     can be an arbitrary expression.  These stmts expand into debug
4466	     insns, and are converted to debug notes by var-tracking.c.  */
4467  return false;
4468}
4469
4470/* Verify a gimple label statement STMT.
4471   Returns true if anything is wrong.  */
4472
4473static bool
4474verify_gimple_label (glabel *stmt)
4475{
4476  tree decl = gimple_label_label (stmt);
4477  int uid;
4478  bool err = false;
4479
4480  if (TREE_CODE (decl) != LABEL_DECL)
4481    return true;
4482  if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4483      && DECL_CONTEXT (decl) != current_function_decl)
4484    {
4485      error ("label's context is not the current function decl");
4486      err |= true;
4487    }
4488
4489  uid = LABEL_DECL_UID (decl);
4490  if (cfun->cfg
4491      && (uid == -1
4492	  || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4493    {
4494      error ("incorrect entry in label_to_block_map");
4495      err |= true;
4496    }
4497
4498  uid = EH_LANDING_PAD_NR (decl);
4499  if (uid)
4500    {
4501      eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4502      if (decl != lp->post_landing_pad)
4503	{
4504	  error ("incorrect setting of landing pad number");
4505	  err |= true;
4506	}
4507    }
4508
4509  return err;
4510}
4511
4512/* Verify a gimple cond statement STMT.
4513   Returns true if anything is wrong.  */
4514
4515static bool
4516verify_gimple_cond (gcond *stmt)
4517{
4518  if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4519    {
4520      error ("invalid comparison code in gimple cond");
4521      return true;
4522    }
4523  if (!(!gimple_cond_true_label (stmt)
4524	|| TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4525      || !(!gimple_cond_false_label (stmt)
4526	   || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4527    {
4528      error ("invalid labels in gimple cond");
4529      return true;
4530    }
4531
4532  return verify_gimple_comparison (boolean_type_node,
4533				   gimple_cond_lhs (stmt),
4534				   gimple_cond_rhs (stmt));
4535}
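
/* Illustrative example: a valid GIMPLE_COND is of the form

     if (a_1 < b_2) goto <L1>; else goto <L2>;

   i.e. a tcc_comparison code with both labels, when present, being
   LABEL_DECLs; the operands themselves are then checked by
   verify_gimple_comparison against boolean_type_node.  */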
4536
4537/* Verify the GIMPLE statement STMT.  Returns true if there is an
4538   error, otherwise false.  */
4539
4540static bool
4541verify_gimple_stmt (gimple stmt)
4542{
4543  switch (gimple_code (stmt))
4544    {
4545    case GIMPLE_ASSIGN:
4546      return verify_gimple_assign (as_a <gassign *> (stmt));
4547
4548    case GIMPLE_LABEL:
4549      return verify_gimple_label (as_a <glabel *> (stmt));
4550
4551    case GIMPLE_CALL:
4552      return verify_gimple_call (as_a <gcall *> (stmt));
4553
4554    case GIMPLE_COND:
4555      return verify_gimple_cond (as_a <gcond *> (stmt));
4556
4557    case GIMPLE_GOTO:
4558      return verify_gimple_goto (as_a <ggoto *> (stmt));
4559
4560    case GIMPLE_SWITCH:
4561      return verify_gimple_switch (as_a <gswitch *> (stmt));
4562
4563    case GIMPLE_RETURN:
4564      return verify_gimple_return (as_a <greturn *> (stmt));
4565
4566    case GIMPLE_ASM:
4567      return false;
4568
4569    case GIMPLE_TRANSACTION:
4570      return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4571
4572    /* Tuples that do not have tree operands.  */
4573    case GIMPLE_NOP:
4574    case GIMPLE_PREDICT:
4575    case GIMPLE_RESX:
4576    case GIMPLE_EH_DISPATCH:
4577    case GIMPLE_EH_MUST_NOT_THROW:
4578      return false;
4579
4580    CASE_GIMPLE_OMP:
4581      /* OpenMP directives are validated by the FE and never operated
4582	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
4583	 non-gimple expressions when the main index variable has had
4584	 its address taken.  This does not affect the loop itself
4585	 because the header of a GIMPLE_OMP_FOR is merely used to determine
4586	 how to set up the parallel iteration.  */
4587      return false;
4588
4589    case GIMPLE_DEBUG:
4590      return verify_gimple_debug (stmt);
4591
4592    default:
4593      gcc_unreachable ();
4594    }
4595}
4596
4597/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
4598   and false otherwise.  */
4599
4600static bool
4601verify_gimple_phi (gimple phi)
4602{
4603  bool err = false;
4604  unsigned i;
4605  tree phi_result = gimple_phi_result (phi);
4606  bool virtual_p;
4607
4608  if (!phi_result)
4609    {
4610      error ("invalid PHI result");
4611      return true;
4612    }
4613
4614  virtual_p = virtual_operand_p (phi_result);
4615  if (TREE_CODE (phi_result) != SSA_NAME
4616      || (virtual_p
4617	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4618    {
4619      error ("invalid PHI result");
4620      err = true;
4621    }
4622
4623  for (i = 0; i < gimple_phi_num_args (phi); i++)
4624    {
4625      tree t = gimple_phi_arg_def (phi, i);
4626
4627      if (!t)
4628	{
4629	  error ("missing PHI def");
4630	  err |= true;
4631	  continue;
4632	}
4633      /* Addressable variables do have SSA_NAMEs but they
4634	 are not considered gimple values.  */
4635      else if ((TREE_CODE (t) == SSA_NAME
4636		&& virtual_p != virtual_operand_p (t))
4637	       || (virtual_p
4638		   && (TREE_CODE (t) != SSA_NAME
4639		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4640	       || (!virtual_p
4641		   && !is_gimple_val (t)))
4642	{
4643	  error ("invalid PHI argument");
4644	  debug_generic_expr (t);
4645	  err |= true;
4646	}
4647#ifdef ENABLE_TYPES_CHECKING
4648      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4649	{
4650	  error ("incompatible types in PHI argument %u", i);
4651	  debug_generic_stmt (TREE_TYPE (phi_result));
4652	  debug_generic_stmt (TREE_TYPE (t));
4653	  err |= true;
4654	}
4655#endif
4656    }
4657
4658  return err;
4659}
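
/* Illustrative example of a PHI accepted by the checks above (dump
   syntax, invented names): at the join of edges from blocks 2 and 3,

     # x_3 = PHI <x_1(2), x_2(3)>

   result and arguments are SSA names of compatible types, and a
   virtual PHI (one whose result is based on the virtual operand) may
   only have virtual arguments.  */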
4660
4661/* Verify the GIMPLE statements inside the sequence STMTS.  */
4662
4663static bool
4664verify_gimple_in_seq_2 (gimple_seq stmts)
4665{
4666  gimple_stmt_iterator ittr;
4667  bool err = false;
4668
4669  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4670    {
4671      gimple stmt = gsi_stmt (ittr);
4672
4673      switch (gimple_code (stmt))
4674        {
4675	case GIMPLE_BIND:
4676	  err |= verify_gimple_in_seq_2 (
4677                   gimple_bind_body (as_a <gbind *> (stmt)));
4678	  break;
4679
4680	case GIMPLE_TRY:
4681	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4682	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4683	  break;
4684
4685	case GIMPLE_EH_FILTER:
4686	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4687	  break;
4688
4689	case GIMPLE_EH_ELSE:
4690	  {
4691	    geh_else *eh_else = as_a <geh_else *> (stmt);
4692	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4693	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4694	  }
4695	  break;
4696
4697	case GIMPLE_CATCH:
4698	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4699					   as_a <gcatch *> (stmt)));
4700	  break;
4701
4702	case GIMPLE_TRANSACTION:
4703	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4704	  break;
4705
4706	default:
4707	  {
4708	    bool err2 = verify_gimple_stmt (stmt);
4709	    if (err2)
4710	      debug_gimple_stmt (stmt);
4711	    err |= err2;
4712	  }
4713	}
4714    }
4715
4716  return err;
4717}
4718
4719/* Verify the contents of a GIMPLE_TRANSACTION.  Returns true if there
4720   is a problem, otherwise false.  */
4721
4722static bool
4723verify_gimple_transaction (gtransaction *stmt)
4724{
4725  tree lab = gimple_transaction_label (stmt);
4726  if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4727    return true;
4728  return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4729}
4730
4731
4732/* Verify the GIMPLE statements inside the statement list STMTS.  */
4733
4734DEBUG_FUNCTION void
4735verify_gimple_in_seq (gimple_seq stmts)
4736{
4737  timevar_push (TV_TREE_STMT_VERIFY);
4738  if (verify_gimple_in_seq_2 (stmts))
4739    internal_error ("verify_gimple failed");
4740  timevar_pop (TV_TREE_STMT_VERIFY);
4741}
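
/* A minimal usage sketch (hypothetical call site, not from this
   file): a pass that builds a sequence by hand can self-check it in
   checking builds with

     #ifdef ENABLE_CHECKING
     verify_gimple_in_seq (seq);
     #endif

   Failures are fatal via internal_error, so this is a debugging aid,
   not a recoverable test.  */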
4742
4743/* Return true when tree node T can be shared.  */
4744
4745static bool
4746tree_node_can_be_shared (tree t)
4747{
4748  if (IS_TYPE_OR_DECL_P (t)
4749      || is_gimple_min_invariant (t)
4750      || TREE_CODE (t) == SSA_NAME
4751      || t == error_mark_node
4752      || TREE_CODE (t) == IDENTIFIER_NODE)
4753    return true;
4754
4755  if (TREE_CODE (t) == CASE_LABEL_EXPR)
4756    return true;
4760
4761  return false;
4762}
4763
4764/* Called via walk_tree.  Verify tree sharing.  */
4765
4766static tree
4767verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4768{
4769  hash_set<void *> *visited = (hash_set<void *> *) data;
4770
4771  if (tree_node_can_be_shared (*tp))
4772    {
4773      *walk_subtrees = false;
4774      return NULL;
4775    }
4776
4777  if (visited->add (*tp))
4778    return *tp;
4779
4780  return NULL;
4781}
4782
4783/* Called via walk_gimple_stmt.  Verify tree sharing.  */
4784
4785static tree
4786verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4787{
4788  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4789  return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4790}
4791
4792static bool eh_error_found;
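
/* Called via hash_map traversal from verify_gimple_in_cfg below.
   Complain about STMT if it is recorded in the EH throw table but was
   not seen in the IL, i.e. is not a member of VISITED.  */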
4793bool
4794verify_eh_throw_stmt_node (const gimple &stmt, const int &,
4795			   hash_set<gimple> *visited)
4796{
4797  if (!visited->contains (stmt))
4798    {
4799      error ("dead STMT in EH table");
4800      debug_gimple_stmt (stmt);
4801      eh_error_found = true;
4802    }
4803  return true;
4804}
4805
4806/* Verify that the block of location LOC is in BLOCKS.  */
4807
4808static bool
4809verify_location (hash_set<tree> *blocks, location_t loc)
4810{
4811  tree block = LOCATION_BLOCK (loc);
4812  if (block != NULL_TREE
4813      && !blocks->contains (block))
4814    {
4815      error ("location references block not in block tree");
4816      return true;
4817    }
4818  if (block != NULL_TREE)
4819    return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4820  return false;
4821}
4822
4823/* Called via walk_tree.  Verify that expressions have no blocks.  */
4824
4825static tree
4826verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4827{
4828  if (!EXPR_P (*tp))
4829    {
4830      *walk_subtrees = false;
4831      return NULL;
4832    }
4833
4834  location_t loc = EXPR_LOCATION (*tp);
4835  if (LOCATION_BLOCK (loc) != NULL)
4836    return *tp;
4837
4838  return NULL;
4839}
4840
4841/* Called via walk_tree.  Verify locations of expressions.  */
4842
4843static tree
4844verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4845{
4846  hash_set<tree> *blocks = (hash_set<tree> *) data;
4847
4848  if (TREE_CODE (*tp) == VAR_DECL
4849      && DECL_HAS_DEBUG_EXPR_P (*tp))
4850    {
4851      tree t = DECL_DEBUG_EXPR (*tp);
4852      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4853      if (addr)
4854	return addr;
4855    }
4856  if ((TREE_CODE (*tp) == VAR_DECL
4857       || TREE_CODE (*tp) == PARM_DECL
4858       || TREE_CODE (*tp) == RESULT_DECL)
4859      && DECL_HAS_VALUE_EXPR_P (*tp))
4860    {
4861      tree t = DECL_VALUE_EXPR (*tp);
4862      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4863      if (addr)
4864	return addr;
4865    }
4866
4867  if (!EXPR_P (*tp))
4868    {
4869      *walk_subtrees = false;
4870      return NULL;
4871    }
4872
4873  location_t loc = EXPR_LOCATION (*tp);
4874  if (verify_location (blocks, loc))
4875    return *tp;
4876
4877  return NULL;
4878}
4879
4880/* Called via walk_gimple_op.  Verify locations of expressions.  */
4881
4882static tree
4883verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4884{
4885  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4886  return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4887}
4888
4889/* Insert all subblocks of BLOCK into BLOCKS and recurse.  */
4890
4891static void
4892collect_subblocks (hash_set<tree> *blocks, tree block)
4893{
4894  tree t;
4895  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4896    {
4897      blocks->add (t);
4898      collect_subblocks (blocks, t);
4899    }
4900}
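
/* For example (illustrative): given a block tree
     B0 -> { B1 -> { B2 }, B3 }
   collect_subblocks (&blocks, B0) adds B1, B2 and B3; the root B0
   itself is added by the caller (see verify_gimple_in_cfg below).  */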
4901
4902/* Verify the GIMPLE statements in the CFG of FN.  */
4903
4904DEBUG_FUNCTION void
4905verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
4906{
4907  basic_block bb;
4908  bool err = false;
4909
4910  timevar_push (TV_TREE_STMT_VERIFY);
4911  hash_set<void *> visited;
4912  hash_set<gimple> visited_stmts;
4913
4914  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
4915  hash_set<tree> blocks;
4916  if (DECL_INITIAL (fn->decl))
4917    {
4918      blocks.add (DECL_INITIAL (fn->decl));
4919      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
4920    }
4921
4922  FOR_EACH_BB_FN (bb, fn)
4923    {
4924      gimple_stmt_iterator gsi;
4925
4926      for (gphi_iterator gpi = gsi_start_phis (bb);
4927	   !gsi_end_p (gpi);
4928	   gsi_next (&gpi))
4929	{
4930	  gphi *phi = gpi.phi ();
4931	  bool err2 = false;
4932	  unsigned i;
4933
4934	  visited_stmts.add (phi);
4935
4936	  if (gimple_bb (phi) != bb)
4937	    {
4938	      error ("gimple_bb (phi) is set to a wrong basic block");
4939	      err2 = true;
4940	    }
4941
4942	  err2 |= verify_gimple_phi (phi);
4943
4944	  /* Only PHI arguments have locations; the PHI itself must not.  */
4945	  if (gimple_location (phi) != UNKNOWN_LOCATION)
4946	    {
4947	      error ("PHI node with location");
4948	      err2 = true;
4949	    }
4950
4951	  for (i = 0; i < gimple_phi_num_args (phi); i++)
4952	    {
4953	      tree arg = gimple_phi_arg_def (phi, i);
4954	      tree addr = walk_tree (&arg, verify_node_sharing_1,
4955				     &visited, NULL);
4956	      if (addr)
4957		{
4958		  error ("incorrect sharing of tree nodes");
4959		  debug_generic_expr (addr);
4960		  err2 |= true;
4961		}
4962	      location_t loc = gimple_phi_arg_location (phi, i);
4963	      if (virtual_operand_p (gimple_phi_result (phi))
4964		  && loc != UNKNOWN_LOCATION)
4965		{
4966		  error ("virtual PHI with argument locations");
4967		  err2 = true;
4968		}
4969	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
4970	      if (addr)
4971		{
4972		  debug_generic_expr (addr);
4973		  err2 = true;
4974		}
4975	      err2 |= verify_location (&blocks, loc);
4976	    }
4977
4978	  if (err2)
4979	    debug_gimple_stmt (phi);
4980	  err |= err2;
4981	}
4982
4983      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4984	{
4985	  gimple stmt = gsi_stmt (gsi);
4986	  bool err2 = false;
4987	  struct walk_stmt_info wi;
4988	  tree addr;
4989	  int lp_nr;
4990
4991	  visited_stmts.add (stmt);
4992
4993	  if (gimple_bb (stmt) != bb)
4994	    {
4995	      error ("gimple_bb (stmt) is set to a wrong basic block");
4996	      err2 = true;
4997	    }
4998
4999	  err2 |= verify_gimple_stmt (stmt);
5000	  err2 |= verify_location (&blocks, gimple_location (stmt));
5001
5002	  memset (&wi, 0, sizeof (wi));
5003	  wi.info = (void *) &visited;
5004	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5005	  if (addr)
5006	    {
5007	      error ("incorrect sharing of tree nodes");
5008	      debug_generic_expr (addr);
5009	      err2 |= true;
5010	    }
5011
5012	  memset (&wi, 0, sizeof (wi));
5013	  wi.info = (void *) &blocks;
5014	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5015	  if (addr)
5016	    {
5017	      debug_generic_expr (addr);
5018	      err2 |= true;
5019	    }
5020
5021	  /* ???  Instead of not checking these stmts at all, the walker
5022	     should know its context via wi.  */
5023	  if (!is_gimple_debug (stmt)
5024	      && !is_gimple_omp (stmt))
5025	    {
5026	      memset (&wi, 0, sizeof (wi));
5027	      addr = walk_gimple_op (stmt, verify_expr, &wi);
5028	      if (addr)
5029		{
5030		  debug_generic_expr (addr);
5031		  inform (gimple_location (stmt), "in statement");
5032		  err2 |= true;
5033		}
5034	    }
5035
5036	  /* If the statement is marked as part of an EH region, then it is
5037	     expected that the statement could throw.  Verify that when we
5038	     have optimizations that simplify statements such that we prove
5039	     they cannot throw, we also update other data structures to
5040	     match.  */
5041	  lp_nr = lookup_stmt_eh_lp (stmt);
5042	  if (lp_nr > 0)
5043	    {
5044	      if (!stmt_could_throw_p (stmt))
5045		{
5046		  if (verify_nothrow)
5047		    {
5048		      error ("statement marked for throw, but doesn%'t");
5049		      err2 |= true;
5050		    }
5051		}
5052	      else if (!gsi_one_before_end_p (gsi))
5053		{
5054		  error ("statement marked for throw in middle of block");
5055		  err2 |= true;
5056		}
5057	    }
5058
5059	  if (err2)
5060	    debug_gimple_stmt (stmt);
5061	  err |= err2;
5062	}
5063    }
5064
5065  eh_error_found = false;
5066  hash_map<gimple, int> *eh_table = get_eh_throw_stmt_table (cfun);
5067  if (eh_table)
5068    eh_table->traverse<hash_set<gimple> *, verify_eh_throw_stmt_node>
5069      (&visited_stmts);
5070
5071  if (err || eh_error_found)
5072    internal_error ("verify_gimple failed");
5073
5074  verify_histograms ();
5075  timevar_pop (TV_TREE_STMT_VERIFY);
5076}
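
/* Usage sketch (hypothetical call site): after transforming the IL a
   pass can self-check with

     verify_gimple_in_cfg (cfun, true);

   where a true second argument additionally demands that every
   statement still recorded in an EH region really can throw.  */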
5077
5078
5079/* Verifies that the flow information is OK.  */
5080
5081static int
5082gimple_verify_flow_info (void)
5083{
5084  int err = 0;
5085  basic_block bb;
5086  gimple_stmt_iterator gsi;
5087  gimple stmt;
5088  edge e;
5089  edge_iterator ei;
5090
5091  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5092      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5093    {
5094      error ("ENTRY_BLOCK has IL associated with it");
5095      err = 1;
5096    }
5097
5098  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5099      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5100    {
5101      error ("EXIT_BLOCK has IL associated with it");
5102      err = 1;
5103    }
5104
5105  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5106    if (e->flags & EDGE_FALLTHRU)
5107      {
5108	error ("fallthru to exit from bb %d", e->src->index);
5109	err = 1;
5110      }
5111
5112  FOR_EACH_BB_FN (bb, cfun)
5113    {
5114      bool found_ctrl_stmt = false;
5115
5116      stmt = NULL;
5117
5118      /* Skip the labels at the start of the basic block.  */
5119      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5120	{
5121	  tree label;
5122	  gimple prev_stmt = stmt;
5123
5124	  stmt = gsi_stmt (gsi);
5125
5126	  if (gimple_code (stmt) != GIMPLE_LABEL)
5127	    break;
5128
5129	  label = gimple_label_label (as_a <glabel *> (stmt));
5130	  if (prev_stmt && DECL_NONLOCAL (label))
5131	    {
5132	      error ("nonlocal label ");
5133	      print_generic_expr (stderr, label, 0);
5134	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5135		       bb->index);
5136	      err = 1;
5137	    }
5138
5139	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5140	    {
5141	      error ("EH landing pad label ");
5142	      print_generic_expr (stderr, label, 0);
5143	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
5144		       bb->index);
5145	      err = 1;
5146	    }
5147
5148	  if (label_to_block (label) != bb)
5149	    {
5150	      error ("label ");
5151	      print_generic_expr (stderr, label, 0);
5152	      fprintf (stderr, " maps to a block other than bb %d",
5153		       bb->index);
5154	      err = 1;
5155	    }
5156
5157	  if (decl_function_context (label) != current_function_decl)
5158	    {
5159	      error ("label ");
5160	      print_generic_expr (stderr, label, 0);
5161	      fprintf (stderr, " has incorrect context in bb %d",
5162		       bb->index);
5163	      err = 1;
5164	    }
5165	}
5166
5167      /* Verify that the body of basic block BB is free of control flow.  */
5168      for (; !gsi_end_p (gsi); gsi_next (&gsi))
5169	{
5170	  gimple stmt = gsi_stmt (gsi);
5171
5172	  if (found_ctrl_stmt)
5173	    {
5174	      error ("control flow in the middle of basic block %d",
5175		     bb->index);
5176	      err = 1;
5177	    }
5178
5179	  if (stmt_ends_bb_p (stmt))
5180	    found_ctrl_stmt = true;
5181
5182	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5183	    {
5184	      error ("label ");
5185	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
5186	      fprintf (stderr, " in the middle of basic block %d", bb->index);
5187	      err = 1;
5188	    }
5189	}
5190
5191      gsi = gsi_last_bb (bb);
5192      if (gsi_end_p (gsi))
5193	continue;
5194
5195      stmt = gsi_stmt (gsi);
5196
5197      if (gimple_code (stmt) == GIMPLE_LABEL)
5198	continue;
5199
5200      err |= verify_eh_edges (stmt);
5201
5202      if (is_ctrl_stmt (stmt))
5203	{
5204	  FOR_EACH_EDGE (e, ei, bb->succs)
5205	    if (e->flags & EDGE_FALLTHRU)
5206	      {
5207		error ("fallthru edge after a control statement in bb %d",
5208		       bb->index);
5209		err = 1;
5210	      }
5211	}
5212
5213      if (gimple_code (stmt) != GIMPLE_COND)
5214	{
5215	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5216	     after anything other than a GIMPLE_COND.  */
5217	  FOR_EACH_EDGE (e, ei, bb->succs)
5218	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5219	      {
5220		error ("true/false edge after a non-GIMPLE_COND in bb %d",
5221		       bb->index);
5222		err = 1;
5223	      }
5224	}
5225
5226      switch (gimple_code (stmt))
5227	{
5228	case GIMPLE_COND:
5229	  {
5230	    edge true_edge;
5231	    edge false_edge;
5232
5233	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5234
5235	    if (!true_edge
5236		|| !false_edge
5237		|| !(true_edge->flags & EDGE_TRUE_VALUE)
5238		|| !(false_edge->flags & EDGE_FALSE_VALUE)
5239		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5240		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5241		|| EDGE_COUNT (bb->succs) >= 3)
5242	      {
5243		error ("wrong outgoing edge flags at end of bb %d",
5244		       bb->index);
5245		err = 1;
5246	      }
5247	  }
5248	  break;
5249
5250	case GIMPLE_GOTO:
5251	  if (simple_goto_p (stmt))
5252	    {
5253	      error ("explicit goto at end of bb %d", bb->index);
5254	      err = 1;
5255	    }
5256	  else
5257	    {
5258	      /* FIXME.  We should double-check that the labels in the
5259		 destination blocks have their address taken.  */
5260	      FOR_EACH_EDGE (e, ei, bb->succs)
5261		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5262				 | EDGE_FALSE_VALUE))
5263		    || !(e->flags & EDGE_ABNORMAL))
5264		  {
5265		    error ("wrong outgoing edge flags at end of bb %d",
5266			   bb->index);
5267		    err = 1;
5268		  }
5269	    }
5270	  break;
5271
5272	case GIMPLE_CALL:
5273	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5274	    break;
5275	  /* ... fallthru ... */
5276	case GIMPLE_RETURN:
5277	  if (!single_succ_p (bb)
5278	      || (single_succ_edge (bb)->flags
5279		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
5280		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5281	    {
5282	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
5283	      err = 1;
5284	    }
5285	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5286	    {
5287	      error ("return edge does not point to exit in bb %d",
5288		     bb->index);
5289	      err = 1;
5290	    }
5291	  break;
5292
5293	case GIMPLE_SWITCH:
5294	  {
5295	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
5296	    tree prev;
5297	    edge e;
5298	    size_t i, n;
5299
5300	    n = gimple_switch_num_labels (switch_stmt);
5301
5302	    /* Mark all the destination basic blocks.  */
5303	    for (i = 0; i < n; ++i)
5304	      {
5305		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5306		basic_block label_bb = label_to_block (lab);
5307		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5308		label_bb->aux = (void *)1;
5309	      }
5310
5311	    /* Verify that the case labels are sorted.  */
5312	    prev = gimple_switch_label (switch_stmt, 0);
5313	    for (i = 1; i < n; ++i)
5314	      {
5315		tree c = gimple_switch_label (switch_stmt, i);
5316		if (!CASE_LOW (c))
5317		  {
5318		    error ("found default case not at the start of "
5319			   "case vector");
5320		    err = 1;
5321		    continue;
5322		  }
5323		if (CASE_LOW (prev)
5324		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5325		  {
5326		    error ("case labels not sorted: ");
5327		    print_generic_expr (stderr, prev, 0);
5328		    fprintf (stderr, " is greater than ");
5329		    print_generic_expr (stderr, c, 0);
5330		    fprintf (stderr, " but comes before it.\n");
5331		    err = 1;
5332		  }
5333		prev = c;
5334	      }
5335	    /* VRP will remove the default case if it can prove it will
5336	       never be executed.  So do not verify there always exists
5337	       a default case here.  */
5338
5339	    FOR_EACH_EDGE (e, ei, bb->succs)
5340	      {
5341		if (!e->dest->aux)
5342		  {
5343		    error ("extra outgoing edge %d->%d",
5344			   bb->index, e->dest->index);
5345		    err = 1;
5346		  }
5347
5348		e->dest->aux = (void *)2;
5349		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5350				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5351		  {
5352		    error ("wrong outgoing edge flags at end of bb %d",
5353			   bb->index);
5354		    err = 1;
5355		  }
5356	      }
5357
5358	    /* Check that we have all of them.  */
5359	    for (i = 0; i < n; ++i)
5360	      {
5361		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5362		basic_block label_bb = label_to_block (lab);
5363
5364		if (label_bb->aux != (void *)2)
5365		  {
5366		    error ("missing edge %i->%i", bb->index, label_bb->index);
5367		    err = 1;
5368		  }
5369	      }
5370
5371	    FOR_EACH_EDGE (e, ei, bb->succs)
5372	      e->dest->aux = (void *)0;
5373	  }
5374	  break;
5375
5376	case GIMPLE_EH_DISPATCH:
5377	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5378	  break;
5379
5380	default:
5381	  break;
5382	}
5383    }
5384
5385  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5386    verify_dominators (CDI_DOMINATORS);
5387
5388  return err;
5389}
5390
5391
5392/* Updates PHI nodes after creating a forwarder block joined
5393   by edge FALLTHRU.  */
5394
5395static void
5396gimple_make_forwarder_block (edge fallthru)
5397{
5398  edge e;
5399  edge_iterator ei;
5400  basic_block dummy, bb;
5401  tree var;
5402  gphi_iterator gsi;
5403
5404  dummy = fallthru->src;
5405  bb = fallthru->dest;
5406
5407  if (single_pred_p (bb))
5408    return;
5409
5410  /* If we redirected a branch we must create new PHI nodes at the
5411     start of BB.  */
5412  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5413    {
5414      gphi *phi, *new_phi;
5415
5416      phi = gsi.phi ();
5417      var = gimple_phi_result (phi);
5418      new_phi = create_phi_node (var, bb);
5419      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5420      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5421		   UNKNOWN_LOCATION);
5422    }
5423
5424  /* Add the arguments we have stored on edges.  */
5425  FOR_EACH_EDGE (e, ei, bb->preds)
5426    {
5427      if (e == fallthru)
5428	continue;
5429
5430      flush_pending_stmts (e);
5431    }
5432}
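
/* An illustrative before/after sketch: if DUMMY ends in

     # x_1 = PHI <a_2(e1), b_3(e2)>

   then after the loop above DUMMY's PHI defines a fresh copy x_4 and
   BB starts with

     # x_1 = PHI <x_4(fallthru), ...>

   where the remaining arguments are restored from the pending stmts
   stored on the other predecessor edges.  */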
5433
5434
5435/* Return a non-special label at the head of basic block BB.
5436   Create one if it doesn't exist.  */
5437
5438tree
5439gimple_block_label (basic_block bb)
5440{
5441  gimple_stmt_iterator i, s = gsi_start_bb (bb);
5442  bool first = true;
5443  tree label;
5444  glabel *stmt;
5445
5446  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5447    {
5448      stmt = dyn_cast <glabel *> (gsi_stmt (i));
5449      if (!stmt)
5450	break;
5451      label = gimple_label_label (stmt);
5452      if (!DECL_NONLOCAL (label))
5453	{
5454	  if (!first)
5455	    gsi_move_before (&i, &s);
5456	  return label;
5457	}
5458    }
5459
5460  label = create_artificial_label (UNKNOWN_LOCATION);
5461  stmt = gimple_build_label (label);
5462  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5463  return label;
5464}
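
/* Usage sketch: callers needing a jump target, e.g. the edge
   redirection code below, simply do

     tree label = gimple_block_label (bb);

   and may rely on the returned label being first in BB, since a
   reusable non-first label is moved to the head above.  */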
5465
5466
5467/* Attempt to perform edge redirection by replacing a possibly complex
5468   jump instruction by a goto or by removing the jump completely.
5469   This can apply only if all edges now point to the same block.  The
5470   parameters and return values are equivalent to
5471   redirect_edge_and_branch.  */
5472
5473static edge
5474gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5475{
5476  basic_block src = e->src;
5477  gimple_stmt_iterator i;
5478  gimple stmt;
5479
5480  /* We can replace or remove a complex jump only when we have exactly
5481     two edges.  */
5482  if (EDGE_COUNT (src->succs) != 2
5483      /* Verify that all targets will be TARGET.  Specifically, the
5484	 edge that is not E must also go to TARGET.  */
5485      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5486    return NULL;
5487
5488  i = gsi_last_bb (src);
5489  if (gsi_end_p (i))
5490    return NULL;
5491
5492  stmt = gsi_stmt (i);
5493
5494  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5495    {
5496      gsi_remove (&i, true);
5497      e = ssa_redirect_edge (e, target);
5498      e->flags = EDGE_FALLTHRU;
5499      return e;
5500    }
5501
5502  return NULL;
5503}
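
/* Illustrative scenario: BB ends in if (x_1 != 0) with its true edge
   already going to TARGET; when the false edge is redirected to
   TARGET as well, the GIMPLE_COND becomes pointless, so it is removed
   above and the surviving edge is turned into a plain fallthru.  */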
5504
5505
5506/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
5507   edge representing the redirected branch.  */
5508
5509static edge
5510gimple_redirect_edge_and_branch (edge e, basic_block dest)
5511{
5512  basic_block bb = e->src;
5513  gimple_stmt_iterator gsi;
5514  edge ret;
5515  gimple stmt;
5516
5517  if (e->flags & EDGE_ABNORMAL)
5518    return NULL;
5519
5520  if (e->dest == dest)
5521    return NULL;
5522
5523  if (e->flags & EDGE_EH)
5524    return redirect_eh_edge (e, dest);
5525
5526  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5527    {
5528      ret = gimple_try_redirect_by_replacing_jump (e, dest);
5529      if (ret)
5530	return ret;
5531    }
5532
5533  gsi = gsi_last_bb (bb);
5534  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5535
5536  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5537    {
5538    case GIMPLE_COND:
5539      /* For COND_EXPR, we only need to redirect the edge.  */
5540      break;
5541
5542    case GIMPLE_GOTO:
5543      /* No non-abnormal edges should lead from a non-simple goto, and
5544	 simple ones should be represented implicitly.  */
5545      gcc_unreachable ();
5546
5547    case GIMPLE_SWITCH:
5548      {
5549	gswitch *switch_stmt = as_a <gswitch *> (stmt);
5550	tree label = gimple_block_label (dest);
5551        tree cases = get_cases_for_edge (e, switch_stmt);
5552
5553	/* If we have a list of cases associated with E, then use it
5554	   as it's a lot faster than walking the entire case vector.  */
5555	if (cases)
5556	  {
5557	    edge e2 = find_edge (e->src, dest);
5558	    tree last, first;
5559
5560	    first = cases;
5561	    while (cases)
5562	      {
5563		last = cases;
5564		CASE_LABEL (cases) = label;
5565		cases = CASE_CHAIN (cases);
5566	      }
5567
5568	    /* If there was already an edge in the CFG, then we need
5569	       to move all the cases associated with E to E2.  */
5570	    if (e2)
5571	      {
5572		tree cases2 = get_cases_for_edge (e2, switch_stmt);
5573
5574		CASE_CHAIN (last) = CASE_CHAIN (cases2);
5575		CASE_CHAIN (cases2) = first;
5576	      }
5577	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5578	  }
5579	else
5580	  {
5581	    size_t i, n = gimple_switch_num_labels (switch_stmt);
5582
5583	    for (i = 0; i < n; i++)
5584	      {
5585		tree elt = gimple_switch_label (switch_stmt, i);
5586		if (label_to_block (CASE_LABEL (elt)) == e->dest)
5587		  CASE_LABEL (elt) = label;
5588	      }
5589	  }
5590      }
5591      break;
5592
5593    case GIMPLE_ASM:
5594      {
5595	gasm *asm_stmt = as_a <gasm *> (stmt);
5596	int i, n = gimple_asm_nlabels (asm_stmt);
5597	tree label = NULL;
5598
5599	for (i = 0; i < n; ++i)
5600	  {
5601	    tree cons = gimple_asm_label_op (asm_stmt, i);
5602	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
5603	      {
5604		if (!label)
5605		  label = gimple_block_label (dest);
5606		TREE_VALUE (cons) = label;
5607	      }
5608	  }
5609
5610	/* If we didn't find any label matching the former edge in the
5611	   asm labels, we must be redirecting the fallthrough
5612	   edge.  */
5613	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5614      }
5615      break;
5616
5617    case GIMPLE_RETURN:
5618      gsi_remove (&gsi, true);
5619      e->flags |= EDGE_FALLTHRU;
5620      break;
5621
5622    case GIMPLE_OMP_RETURN:
5623    case GIMPLE_OMP_CONTINUE:
5624    case GIMPLE_OMP_SECTIONS_SWITCH:
5625    case GIMPLE_OMP_FOR:
5626      /* The edges from OMP constructs can be simply redirected.  */
5627      break;
5628
5629    case GIMPLE_EH_DISPATCH:
5630      if (!(e->flags & EDGE_FALLTHRU))
5631	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
5632      break;
5633
5634    case GIMPLE_TRANSACTION:
5635      /* The ABORT edge has a stored label associated with it, otherwise
5636	 the edges are simply redirectable.  */
5637      if (e->flags == 0)
5638	gimple_transaction_set_label (as_a <gtransaction *> (stmt),
5639				      gimple_block_label (dest));
5640      break;
5641
5642    default:
5643      /* Otherwise it must be a fallthru edge, and we don't need to
5644	 do anything besides redirecting it.  */
5645      gcc_assert (e->flags & EDGE_FALLTHRU);
5646      break;
5647    }
5648
5649  /* Now update the edge in the CFG; ssa_redirect_edge also queues the
5650     PHI arguments from the old edge so that the caller can commit them
5651     with flush_pending_stmts.  */
5652  e = ssa_redirect_edge (e, dest);
5653
5654  return e;
5655}
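
/* As an example of the GIMPLE_SWITCH handling above: redirecting an
   edge that carries "case 2:" and "case 3:" to a new destination only
   rewrites the CASE_LABEL of those two CASE_LABEL_EXPRs when the
   per-edge chain from get_cases_for_edge is available; otherwise the
   whole case vector of the switch must be scanned.  */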
5656
5657/* Returns true if it is possible to remove edge E by redirecting
5658   it to the destination of the other edge from E->src.  */
5659
5660static bool
5661gimple_can_remove_branch_p (const_edge e)
5662{
5663  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5664    return false;
5665
5666  return true;
5667}
5668
5669/* Simple wrapper, as we can always redirect fallthru edges.  */
5670
5671static basic_block
5672gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5673{
5674  e = gimple_redirect_edge_and_branch (e, dest);
5675  gcc_assert (e);
5676
5677  return NULL;
5678}
5679
5680
5681/* Splits basic block BB after statement STMT (but at least after the
5682   labels).  If STMT is NULL, BB is split just after the labels.  */
5683
5684static basic_block
5685gimple_split_block (basic_block bb, void *stmt)
5686{
5687  gimple_stmt_iterator gsi;
5688  gimple_stmt_iterator gsi_tgt;
5689  gimple_seq list;
5690  basic_block new_bb;
5691  edge e;
5692  edge_iterator ei;
5693
5694  new_bb = create_empty_bb (bb);
5695
5696  /* Redirect the outgoing edges.  */
5697  new_bb->succs = bb->succs;
5698  bb->succs = NULL;
5699  FOR_EACH_EDGE (e, ei, new_bb->succs)
5700    e->src = new_bb;
5701
5702  /* Get a stmt iterator pointing to the first stmt to move.  */
5703  if (!stmt || gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5704    gsi = gsi_after_labels (bb);
5705  else
5706    {
5707      gsi = gsi_for_stmt ((gimple) stmt);
5708      gsi_next (&gsi);
5709    }
5710
5711  /* Move everything from GSI to the new basic block.  */
5712  if (gsi_end_p (gsi))
5713    return new_bb;
5714
5715  /* Split the statement list - avoid re-creating new containers as this
5716     brings ugly quadratic memory consumption in the inliner.
5717     (We are still quadratic since we need to update stmt BB pointers,
5718     sadly.)  */
5719  gsi_split_seq_before (&gsi, &list);
5720  set_bb_seq (new_bb, list);
5721  for (gsi_tgt = gsi_start (list);
5722       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5723    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5724
5725  return new_bb;
5726}
5727
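/* A small sketch of the effect (illustrative statements only):
   splitting

     bb:  a_1 = 1;  b_2 = 2;  c_3 = 3;

   after "b_2 = 2;" leaves the first two statements in BB and moves
   "c_3 = 3;", together with all of BB's outgoing edges, into the
   returned block.  */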
5728
5729/* Moves basic block BB after block AFTER.  */
5730
5731static bool
5732gimple_move_block_after (basic_block bb, basic_block after)
5733{
5734  if (bb->prev_bb == after)
5735    return true;
5736
5737  unlink_block (bb);
5738  link_block (bb, after);
5739
5740  return true;
5741}
5742
5743
5744/* Return TRUE if block BB has no executable statements, otherwise return
5745   FALSE.  */
5746
5747static bool
5748gimple_empty_block_p (basic_block bb)
5749{
5750  /* BB must have no executable statements.  */
5751  gimple_stmt_iterator gsi = gsi_after_labels (bb);
5752  if (phi_nodes (bb))
5753    return false;
5754  if (gsi_end_p (gsi))
5755    return true;
5756  if (is_gimple_debug (gsi_stmt (gsi)))
5757    gsi_next_nondebug (&gsi);
5758  return gsi_end_p (gsi);
5759}
5760
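/* For instance (an illustrative sketch), a block containing only

       <L1>:
       # DEBUG x => 42

   is considered empty by the predicate above, whereas a block holding
   a PHI node or any real (non-label, non-debug) statement is not.  */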
5761
5762/* Split a basic block if it ends with a conditional branch and if the
5763   other part of the block is not empty.  */
5764
5765static basic_block
5766gimple_split_block_before_cond_jump (basic_block bb)
5767{
5768  gimple last, split_point;
5769  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5770  if (gsi_end_p (gsi))
5771    return NULL;
5772  last = gsi_stmt (gsi);
5773  if (gimple_code (last) != GIMPLE_COND
5774      && gimple_code (last) != GIMPLE_SWITCH)
5775    return NULL;
5776  gsi_prev_nondebug (&gsi);
5777  split_point = gsi_stmt (gsi);
5778  return split_block (bb, split_point)->dest;
5779}
5780
5781
5782/* Return true if basic_block can be duplicated.  */
5783
5784static bool
5785gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5786{
5787  return true;
5788}
5789
5790/* Create a duplicate of the basic block BB.  NOTE: This does not
5791   preserve SSA form.  */
5792
5793static basic_block
5794gimple_duplicate_bb (basic_block bb)
5795{
5796  basic_block new_bb;
5797  gimple_stmt_iterator gsi_tgt;
5798
5799  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5800
5801  /* Copy the PHI nodes.  We ignore PHI node arguments here because
5802     the incoming edges have not been setup yet.  */
5803  for (gphi_iterator gpi = gsi_start_phis (bb);
5804       !gsi_end_p (gpi);
5805       gsi_next (&gpi))
5806    {
5807      gphi *phi, *copy;
5808      phi = gpi.phi ();
5809      copy = create_phi_node (NULL_TREE, new_bb);
5810      create_new_def_for (gimple_phi_result (phi), copy,
5811			  gimple_phi_result_ptr (copy));
5812      gimple_set_uid (copy, gimple_uid (phi));
5813    }
5814
5815  gsi_tgt = gsi_start_bb (new_bb);
5816  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5817       !gsi_end_p (gsi);
5818       gsi_next (&gsi))
5819    {
5820      def_operand_p def_p;
5821      ssa_op_iter op_iter;
5822      tree lhs;
5823      gimple stmt, copy;
5824
5825      stmt = gsi_stmt (gsi);
5826      if (gimple_code (stmt) == GIMPLE_LABEL)
5827	continue;
5828
5829      /* Don't duplicate label debug stmts.  */
5830      if (gimple_debug_bind_p (stmt)
5831	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
5832	     == LABEL_DECL)
5833	continue;
5834
5835      /* Create a new copy of STMT and duplicate STMT's virtual
5836	 operands.  */
5837      copy = gimple_copy (stmt);
5838      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5839
5840      maybe_duplicate_eh_stmt (copy, stmt);
5841      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5842
5843      /* When copying around a stmt writing into a local non-user
5844	 aggregate, make sure it won't share stack slot with other
5845	 vars.  */
5846      lhs = gimple_get_lhs (stmt);
5847      if (lhs && TREE_CODE (lhs) != SSA_NAME)
5848	{
5849	  tree base = get_base_address (lhs);
5850	  if (base
5851	      && (TREE_CODE (base) == VAR_DECL
5852		  || TREE_CODE (base) == RESULT_DECL)
5853	      && DECL_IGNORED_P (base)
5854	      && !TREE_STATIC (base)
5855	      && !DECL_EXTERNAL (base)
5856	      && (TREE_CODE (base) != VAR_DECL
5857		  || !DECL_HAS_VALUE_EXPR_P (base)))
5858	    DECL_NONSHAREABLE (base) = 1;
5859	}
5860
5861      /* Create new names for all the definitions created by COPY and
5862	 add replacement mappings for each new name.  */
5863      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5864	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5865    }
5866
5867  return new_bb;
5868}
5869
5870/* Adds phi node arguments for edge E_COPY after basic block duplication.  */
5871
5872static void
5873add_phi_args_after_copy_edge (edge e_copy)
5874{
5875  basic_block bb, bb_copy = e_copy->src, dest;
5876  edge e;
5877  edge_iterator ei;
5878  gphi *phi, *phi_copy;
5879  tree def;
5880  gphi_iterator psi, psi_copy;
5881
5882  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5883    return;
5884
5885  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5886
5887  if (e_copy->dest->flags & BB_DUPLICATED)
5888    dest = get_bb_original (e_copy->dest);
5889  else
5890    dest = e_copy->dest;
5891
5892  e = find_edge (bb, dest);
5893  if (!e)
5894    {
5895      /* During loop unrolling the target of the latch edge is copied.
5896	 In this case we are not looking for the edge to DEST, but for
5897	 the edge to the duplicated block whose original was DEST.  */
5898      FOR_EACH_EDGE (e, ei, bb->succs)
5899	{
5900	  if ((e->dest->flags & BB_DUPLICATED)
5901	      && get_bb_original (e->dest) == dest)
5902	    break;
5903	}
5904
5905      gcc_assert (e != NULL);
5906    }
5907
5908  for (psi = gsi_start_phis (e->dest),
5909       psi_copy = gsi_start_phis (e_copy->dest);
5910       !gsi_end_p (psi);
5911       gsi_next (&psi), gsi_next (&psi_copy))
5912    {
5913      phi = psi.phi ();
5914      phi_copy = psi_copy.phi ();
5915      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5916      add_phi_arg (phi_copy, def, e_copy,
5917		   gimple_phi_arg_location_from_edge (phi, e));
5918    }
5919}
5920
5921
5922/* Basic block BB_COPY was created by code duplication.  Add phi node
5923   arguments for edges going out of BB_COPY.  The blocks that were
5924   duplicated have BB_DUPLICATED set.  */
5925
5926void
5927add_phi_args_after_copy_bb (basic_block bb_copy)
5928{
5929  edge e_copy;
5930  edge_iterator ei;
5931
5932  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5933    {
5934      add_phi_args_after_copy_edge (e_copy);
5935    }
5936}
5937
5938/* Blocks in REGION_COPY array of length N_REGION were created by
5939   duplication of basic blocks.  Add phi node arguments for edges
5940   going from these blocks.  If E_COPY is not NULL, also add
5941   phi node arguments for its destination.  */
5942
5943void
5944add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5945			 edge e_copy)
5946{
5947  unsigned i;
5948
5949  for (i = 0; i < n_region; i++)
5950    region_copy[i]->flags |= BB_DUPLICATED;
5951
5952  for (i = 0; i < n_region; i++)
5953    add_phi_args_after_copy_bb (region_copy[i]);
5954  if (e_copy)
5955    add_phi_args_after_copy_edge (e_copy);
5956
5957  for (i = 0; i < n_region; i++)
5958    region_copy[i]->flags &= ~BB_DUPLICATED;
5959}
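
/* A minimal usage sketch (hypothetical caller, mirroring what
   gimple_duplicate_sese_region below does): after duplicating a region
   with copy_bbs, the PHI arguments on edges leaving the copies can be
   filled in with

     copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
	       loop, split_edge_bb_loc (entry), true);
     add_phi_args_after_copy (region_copy, n_region, NULL);

   The BB_DUPLICATED flags are set and cleared internally, so the
   caller does not have to manage them.  */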
5960
5961/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5962   important exit edge EXIT.  By important we mean that no SSA name defined
5963   inside region is live over the other exit edges of the region.  All entry
5964   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
5965   to the duplicate of the region.  Dominance and loop information is
5966   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
5967   UPDATE_DOMINANCE is false then we assume that the caller will update the
5968   dominance information after calling this function.  The new basic
5969   blocks are stored to REGION_COPY in the same order as they had in REGION,
5970   provided that REGION_COPY is not NULL.
5971   The function returns false if it is unable to copy the region,
5972   true otherwise.  */
5973
5974bool
5975gimple_duplicate_sese_region (edge entry, edge exit,
5976			    basic_block *region, unsigned n_region,
5977			    basic_block *region_copy,
5978			    bool update_dominance)
5979{
5980  unsigned i;
5981  bool free_region_copy = false, copying_header = false;
5982  struct loop *loop = entry->dest->loop_father;
5983  edge exit_copy;
5984  vec<basic_block> doms;
5985  edge redirected;
5986  int total_freq = 0, entry_freq = 0;
5987  gcov_type total_count = 0, entry_count = 0;
5988
5989  if (!can_copy_bbs_p (region, n_region))
5990    return false;
5991
5992  /* Some sanity checking.  Note that we do not check for all possible
5993     misuses of the functions.  E.g. if you ask to copy something weird,
5994     it will work, but the state of the structures probably will not be
5995     correct.  */
5996  for (i = 0; i < n_region; i++)
5997    {
5998      /* We do not handle subloops, i.e. all the blocks must belong to the
5999	 same loop.  */
6000      if (region[i]->loop_father != loop)
6001	return false;
6002
6003      if (region[i] != entry->dest
6004	  && region[i] == loop->header)
6005	return false;
6006    }
6007
6008  /* In case the function is used for loop header copying (which is the primary
6009     use), ensure that EXIT and its copy will be the new latch and entry edges.  */
6010  if (loop->header == entry->dest)
6011    {
6012      copying_header = true;
6013
6014      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6015	return false;
6016
6017      for (i = 0; i < n_region; i++)
6018	if (region[i] != exit->src
6019	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6020	  return false;
6021    }
6022
6023  initialize_original_copy_tables ();
6024
6025  if (copying_header)
6026    set_loop_copy (loop, loop_outer (loop));
6027  else
6028    set_loop_copy (loop, loop);
6029
6030  if (!region_copy)
6031    {
6032      region_copy = XNEWVEC (basic_block, n_region);
6033      free_region_copy = true;
6034    }
6035
6036  /* Record blocks outside the region that are dominated by something
6037     inside.  */
6038  if (update_dominance)
6039    {
6040      doms.create (0);
6041      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6042    }
6043
6044  if (entry->dest->count)
6045    {
6046      total_count = entry->dest->count;
6047      entry_count = entry->count;
6048      /* Fix up corner cases, to avoid division by zero or creation of negative
6049	 frequencies.  */
6050      if (entry_count > total_count)
6051	entry_count = total_count;
6052    }
6053  else
6054    {
6055      total_freq = entry->dest->frequency;
6056      entry_freq = EDGE_FREQUENCY (entry);
6057      /* Fix up corner cases, to avoid division by zero or creation of negative
6058	 frequencies.  */
6059      if (total_freq == 0)
6060	total_freq = 1;
6061      else if (entry_freq > total_freq)
6062	entry_freq = total_freq;
6063    }
6064
6065  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6066	    split_edge_bb_loc (entry), update_dominance);
6067  if (total_count)
6068    {
6069      scale_bbs_frequencies_gcov_type (region, n_region,
6070				       total_count - entry_count,
6071				       total_count);
6072      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6073				       total_count);
6074    }
6075  else
6076    {
6077      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6078				 total_freq);
6079      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6080    }
6081
6082  if (copying_header)
6083    {
6084      loop->header = exit->dest;
6085      loop->latch = exit->src;
6086    }
6087
6088  /* Redirect the entry and add the phi node arguments.  */
6089  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6090  gcc_assert (redirected != NULL);
6091  flush_pending_stmts (entry);
6092
6093  /* Concerning updating of dominators:  We must recount dominators
6094     for entry block and its copy.  Anything that is outside of the
6095     region, but was dominated by something inside needs recounting as
6096     well.  */
6097  if (update_dominance)
6098    {
6099      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6100      doms.safe_push (get_bb_original (entry->dest));
6101      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6102      doms.release ();
6103    }
6104
6105  /* Add the other PHI node arguments.  */
6106  add_phi_args_after_copy (region_copy, n_region, NULL);
6107
6108  if (free_region_copy)
6109    free (region_copy);
6110
6111  free_original_copy_tables ();
6112  return true;
6113}
6114
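/* For example, when the function above is used for loop header
   copying, a caller might invoke it roughly as (a sketch; the real
   callers compute BBS and EXIT from the loop structure first)

     edge entry = loop_preheader_edge (loop);
     gimple_duplicate_sese_region (entry, exit, bbs, n_bbs, NULL, true);

   after which ENTRY leads into the duplicated header and EXIT and its
   copy become the new entry and latch edges of LOOP.  */
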
6115/* Checks if BB is part of the region defined by N_REGION BBS.  */
6116static bool
6117bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6118{
6119  unsigned int n;
6120
6121  for (n = 0; n < n_region; n++)
6122    {
6123      if (bb == bbs[n])
6124	return true;
6125    }
6126  return false;
6127}
6128
6129/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
6130   are stored to REGION_COPY in the same order as they appear
6131   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
6132   the region, EXIT an exit from it.  The condition guarding EXIT
6133   is moved to ENTRY.  Returns true if duplication succeeds, false
6134   otherwise.
6135
6136   For example,
6137
6138   some_code;
6139   if (cond)
6140     A;
6141   else
6142     B;
6143
6144   is transformed to
6145
6146   if (cond)
6147     {
6148       some_code;
6149       A;
6150     }
6151   else
6152     {
6153       some_code;
6154       B;
6155     }
6156*/
6157
6158bool
6159gimple_duplicate_sese_tail (edge entry, edge exit,
6160			  basic_block *region, unsigned n_region,
6161			  basic_block *region_copy)
6162{
6163  unsigned i;
6164  bool free_region_copy = false;
6165  struct loop *loop = exit->dest->loop_father;
6166  struct loop *orig_loop = entry->dest->loop_father;
6167  basic_block switch_bb, entry_bb, nentry_bb;
6168  vec<basic_block> doms;
6169  int total_freq = 0, exit_freq = 0;
6170  gcov_type total_count = 0, exit_count = 0;
6171  edge exits[2], nexits[2], e;
6172  gimple_stmt_iterator gsi;
6173  gimple cond_stmt;
6174  edge sorig, snew;
6175  basic_block exit_bb;
6176  gphi_iterator psi;
6177  gphi *phi;
6178  tree def;
6179  struct loop *target, *aloop, *cloop;
6180
6181  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6182  exits[0] = exit;
6183  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6184
6185  if (!can_copy_bbs_p (region, n_region))
6186    return false;
6187
6188  initialize_original_copy_tables ();
6189  set_loop_copy (orig_loop, loop);
6190
6191  target = loop;
6192  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6193    {
6194      if (bb_part_of_region_p (aloop->header, region, n_region))
6195	{
6196	  cloop = duplicate_loop (aloop, target);
6197	  duplicate_subloops (aloop, cloop);
6198	}
6199    }
6200
6201  if (!region_copy)
6202    {
6203      region_copy = XNEWVEC (basic_block, n_region);
6204      free_region_copy = true;
6205    }
6206
6207  gcc_assert (!need_ssa_update_p (cfun));
6208
6209  /* Record blocks outside the region that are dominated by something
6210     inside.  */
6211  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6212
6213  if (exit->src->count)
6214    {
6215      total_count = exit->src->count;
6216      exit_count = exit->count;
6217      /* Fix up corner cases, to avoid division by zero or creation of negative
6218	 frequencies.  */
6219      if (exit_count > total_count)
6220	exit_count = total_count;
6221    }
6222  else
6223    {
6224      total_freq = exit->src->frequency;
6225      exit_freq = EDGE_FREQUENCY (exit);
6226      /* Fix up corner cases, to avoid division by zero or creation of negative
6227	 frequencies.  */
6228      if (total_freq == 0)
6229	total_freq = 1;
6230      if (exit_freq > total_freq)
6231	exit_freq = total_freq;
6232    }
6233
6234  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6235	    split_edge_bb_loc (exit), true);
6236  if (total_count)
6237    {
6238      scale_bbs_frequencies_gcov_type (region, n_region,
6239				       total_count - exit_count,
6240				       total_count);
6241      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6242				       total_count);
6243    }
6244  else
6245    {
6246      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6247				 total_freq);
6248      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6249    }
6250
6251  /* Create the switch block, and put the exit condition to it.  */
6252  entry_bb = entry->dest;
6253  nentry_bb = get_bb_copy (entry_bb);
6254  if (!last_stmt (entry->src)
6255      || !stmt_ends_bb_p (last_stmt (entry->src)))
6256    switch_bb = entry->src;
6257  else
6258    switch_bb = split_edge (entry);
6259  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6260
6261  gsi = gsi_last_bb (switch_bb);
6262  cond_stmt = last_stmt (exit->src);
6263  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6264  cond_stmt = gimple_copy (cond_stmt);
6265
6266  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6267
6268  sorig = single_succ_edge (switch_bb);
6269  sorig->flags = exits[1]->flags;
6270  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6271
6272  /* Register the new edge from SWITCH_BB in loop exit lists.  */
6273  rescan_loop_exit (snew, true, false);
6274
6275  /* Add the PHI node arguments.  */
6276  add_phi_args_after_copy (region_copy, n_region, snew);
6277
6278  /* Get rid of now superfluous conditions and associated edges (and phi node
6279     arguments).  */
6280  exit_bb = exit->dest;
6281
6282  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6283  PENDING_STMT (e) = NULL;
6284
6285  /* The latch of ORIG_LOOP was copied, and so was the backedge
6286     to the original header.  We redirect this backedge to EXIT_BB.  */
6287  for (i = 0; i < n_region; i++)
6288    if (get_bb_original (region_copy[i]) == orig_loop->latch)
6289      {
6290	gcc_assert (single_succ_edge (region_copy[i]));
6291	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6292	PENDING_STMT (e) = NULL;
6293	for (psi = gsi_start_phis (exit_bb);
6294	     !gsi_end_p (psi);
6295	     gsi_next (&psi))
6296	  {
6297	    phi = psi.phi ();
6298	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6299	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6300	  }
6301      }
6302  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6303  PENDING_STMT (e) = NULL;
6304
6305  /* Anything that is outside of the region, but was dominated by something
6306     inside needs to update dominance info.  */
6307  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6308  doms.release ();
6309  /* Update the SSA web.  */
6310  update_ssa (TODO_update_ssa);
6311
6312  if (free_region_copy)
6313    free (region_copy);
6314
6315  free_original_copy_tables ();
6316  return true;
6317}
6318
6319/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
6320   adding blocks when the dominator traversal reaches EXIT.  This
6321   function silently assumes that ENTRY strictly dominates EXIT.  */
6322
6323void
6324gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6325			      vec<basic_block> *bbs_p)
6326{
6327  basic_block son;
6328
6329  for (son = first_dom_son (CDI_DOMINATORS, entry);
6330       son;
6331       son = next_dom_son (CDI_DOMINATORS, son))
6332    {
6333      bbs_p->safe_push (son);
6334      if (son != exit)
6335	gather_blocks_in_sese_region (son, exit, bbs_p);
6336    }
6337}
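
/* E.g. the collection step in move_sese_region_to_fn below boils down
   to

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

   Note that ENTRY itself must be pushed by the caller; the dominator
   traversal only adds the blocks dominated by it.  */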
6338
6339/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6340   The duplicates are recorded in VARS_MAP.  */
6341
6342static void
6343replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6344			   tree to_context)
6345{
6346  tree t = *tp, new_t;
6347  struct function *f = DECL_STRUCT_FUNCTION (to_context);
6348
6349  if (DECL_CONTEXT (t) == to_context)
6350    return;
6351
6352  bool existed;
6353  tree &loc = vars_map->get_or_insert (t, &existed);
6354
6355  if (!existed)
6356    {
6357      if (SSA_VAR_P (t))
6358	{
6359	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6360	  add_local_decl (f, new_t);
6361	}
6362      else
6363	{
6364	  gcc_assert (TREE_CODE (t) == CONST_DECL);
6365	  new_t = copy_node (t);
6366	}
6367      DECL_CONTEXT (new_t) = to_context;
6368
6369      loc = new_t;
6370    }
6371  else
6372    new_t = loc;
6373
6374  *tp = new_t;
6375}
6376
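/* For illustration (hypothetical decl names): moving a statement
   "tmp.3 = x + 1" into a child function replaces the VAR_DECL behind
   tmp.3 with a copy whose DECL_CONTEXT is the child's FUNCTION_DECL;
   later lookups of the same decl hit the VARS_MAP entry and reuse that
   first copy.  */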
6377
6378/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6379   VARS_MAP maps old ssa names and var_decls to the new ones.  */
6380
6381static tree
6382replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6383		  tree to_context)
6384{
6385  tree new_name;
6386
6387  gcc_assert (!virtual_operand_p (name));
6388
6389  tree *loc = vars_map->get (name);
6390
6391  if (!loc)
6392    {
6393      tree decl = SSA_NAME_VAR (name);
6394      if (decl)
6395	{
6396	  replace_by_duplicate_decl (&decl, vars_map, to_context);
6397	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6398				       decl, SSA_NAME_DEF_STMT (name));
6399	  if (SSA_NAME_IS_DEFAULT_DEF (name))
6400	    set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6401				 decl, new_name);
6402	}
6403      else
6404	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6405				     name, SSA_NAME_DEF_STMT (name));
6406
6407      vars_map->put (name, new_name);
6408    }
6409  else
6410    new_name = *loc;
6411
6412  return new_name;
6413}
6414
6415struct move_stmt_d
6416{
6417  tree orig_block;
6418  tree new_block;
6419  tree from_context;
6420  tree to_context;
6421  hash_map<tree, tree> *vars_map;
6422  htab_t new_label_map;
6423  hash_map<void *, void *> *eh_map;
6424  bool remap_decls_p;
6425};
6426
6427/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
6428   contained in *TP to NEW_BLOCK if it was ORIG_BLOCK previously, and
6429   change the DECL_CONTEXT of every local variable referenced in *TP.  */
6430
6431static tree
6432move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6433{
6434  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6435  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6436  tree t = *tp;
6437
6438  if (EXPR_P (t))
6439    {
6440      tree block = TREE_BLOCK (t);
6441      if (block == p->orig_block
6442	  || (p->orig_block == NULL_TREE
6443	      && block != NULL_TREE))
6444	TREE_SET_BLOCK (t, p->new_block);
6445#ifdef ENABLE_CHECKING
6446      else if (block != NULL_TREE)
6447	{
6448	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6449	    block = BLOCK_SUPERCONTEXT (block);
6450	  gcc_assert (block == p->orig_block);
6451	}
6452#endif
6453    }
6454  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6455    {
6456      if (TREE_CODE (t) == SSA_NAME)
6457	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
6458      else if (TREE_CODE (t) == LABEL_DECL)
6459	{
6460	  if (p->new_label_map)
6461	    {
6462	      struct tree_map in, *out;
6463	      in.base.from = t;
6464	      out = (struct tree_map *)
6465		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6466	      if (out)
6467		*tp = t = out->to;
6468	    }
6469
6470	  DECL_CONTEXT (t) = p->to_context;
6471	}
6472      else if (p->remap_decls_p)
6473	{
6474	  /* Replace T with its duplicate.  T should no longer appear in the
6475	     parent function, so this looks wasteful; however, it may appear
6476	     in referenced_vars, and more importantly, as virtual operands of
6477	     statements, and in alias lists of other variables.  It would be
6478	     quite difficult to expunge it from all those places.  ??? It might
6479	     suffice to do this for addressable variables.  */
6480	  if ((TREE_CODE (t) == VAR_DECL
6481	       && !is_global_var (t))
6482	      || TREE_CODE (t) == CONST_DECL)
6483	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6484	}
6485      *walk_subtrees = 0;
6486    }
6487  else if (TYPE_P (t))
6488    *walk_subtrees = 0;
6489
6490  return NULL_TREE;
6491}
6492
6493/* Helper for move_stmt_r.  Given an EH region number for the source
6494   function, map that to the duplicate EH region number in the dest.  */
6495
6496static int
6497move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6498{
6499  eh_region old_r, new_r;
6500
6501  old_r = get_eh_region_from_number (old_nr);
6502  new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
6503
6504  return new_r->index;
6505}
6506
6507/* Similar, but operate on INTEGER_CSTs.  */
6508
6509static tree
6510move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6511{
6512  int old_nr, new_nr;
6513
6514  old_nr = tree_to_shwi (old_t_nr);
6515  new_nr = move_stmt_eh_region_nr (old_nr, p);
6516
6517  return build_int_cst (integer_type_node, new_nr);
6518}
6519
6520/* Like move_stmt_op, but for gimple statements.
6521
6522   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
6523   contained in the current statement in *GSI_P and change the
6524   DECL_CONTEXT of every local variable referenced in the current
6525   statement.  */
6526
6527static tree
6528move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6529	     struct walk_stmt_info *wi)
6530{
6531  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6532  gimple stmt = gsi_stmt (*gsi_p);
6533  tree block = gimple_block (stmt);
6534
6535  if (block == p->orig_block
6536      || (p->orig_block == NULL_TREE
6537	  && block != NULL_TREE))
6538    gimple_set_block (stmt, p->new_block);
6539
6540  switch (gimple_code (stmt))
6541    {
6542    case GIMPLE_CALL:
6543      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
6544      {
6545	tree r, fndecl = gimple_call_fndecl (stmt);
6546	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6547	  switch (DECL_FUNCTION_CODE (fndecl))
6548	    {
6549	    case BUILT_IN_EH_COPY_VALUES:
6550	      r = gimple_call_arg (stmt, 1);
6551	      r = move_stmt_eh_region_tree_nr (r, p);
6552	      gimple_call_set_arg (stmt, 1, r);
6553	      /* FALLTHRU */
6554
6555	    case BUILT_IN_EH_POINTER:
6556	    case BUILT_IN_EH_FILTER:
6557	      r = gimple_call_arg (stmt, 0);
6558	      r = move_stmt_eh_region_tree_nr (r, p);
6559	      gimple_call_set_arg (stmt, 0, r);
6560	      break;
6561
6562	    default:
6563	      break;
6564	    }
6565      }
6566      break;
6567
6568    case GIMPLE_RESX:
6569      {
6570	gresx *resx_stmt = as_a <gresx *> (stmt);
6571	int r = gimple_resx_region (resx_stmt);
6572	r = move_stmt_eh_region_nr (r, p);
6573	gimple_resx_set_region (resx_stmt, r);
6574      }
6575      break;
6576
6577    case GIMPLE_EH_DISPATCH:
6578      {
6579	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
6580	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
6581	r = move_stmt_eh_region_nr (r, p);
6582	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
6583      }
6584      break;
6585
6586    case GIMPLE_OMP_RETURN:
6587    case GIMPLE_OMP_CONTINUE:
6588      break;
6589    default:
6590      if (is_gimple_omp (stmt))
6591	{
6592	  /* Do not remap variables inside OMP directives.  Variables
6593	     referenced in clauses and directive header belong to the
6594	     parent function and should not be moved into the child
6595	     function.  */
6596	  bool save_remap_decls_p = p->remap_decls_p;
6597	  p->remap_decls_p = false;
6598	  *handled_ops_p = true;
6599
6600	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6601			       move_stmt_op, wi);
6602
6603	  p->remap_decls_p = save_remap_decls_p;
6604	}
6605      break;
6606    }
6607
6608  return NULL_TREE;
6609}
6610
6611/* Move basic block BB from function CFUN to function DEST_FN.  The
6612   block is moved out of the original linked list and placed after
6613   block AFTER in the new list.  Also, the block is removed from the
6614   original array of blocks and placed in DEST_FN's array of blocks.
6615   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6616   updated to reflect the moved edges.
6617
6618   The local variables are remapped to new instances, VARS_MAP is used
6619   to record the mapping.  */
6620
6621static void
6622move_block_to_fn (struct function *dest_cfun, basic_block bb,
6623		  basic_block after, bool update_edge_count_p,
6624		  struct move_stmt_d *d)
6625{
6626  struct control_flow_graph *cfg;
6627  edge_iterator ei;
6628  edge e;
6629  gimple_stmt_iterator si;
6630  unsigned old_len, new_len;
6631
6632  /* Remove BB from dominance structures.  */
6633  delete_from_dominance_info (CDI_DOMINATORS, bb);
6634
6635  /* Move BB from its current loop to the copy in the new function.  */
6636  if (current_loops)
6637    {
6638      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6639      if (new_loop)
6640	bb->loop_father = new_loop;
6641    }
6642
6643  /* Link BB to the new linked list.  */
6644  move_block_after (bb, after);
6645
6646  /* Update the edge count in the corresponding flowgraphs.  */
6647  if (update_edge_count_p)
6648    FOR_EACH_EDGE (e, ei, bb->succs)
6649      {
6650	cfun->cfg->x_n_edges--;
6651	dest_cfun->cfg->x_n_edges++;
6652      }
6653
6654  /* Remove BB from the original basic block array.  */
6655  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6656  cfun->cfg->x_n_basic_blocks--;
6657
6658  /* Grow DEST_CFUN's basic block array if needed.  */
6659  cfg = dest_cfun->cfg;
6660  cfg->x_n_basic_blocks++;
6661  if (bb->index >= cfg->x_last_basic_block)
6662    cfg->x_last_basic_block = bb->index + 1;
6663
6664  old_len = vec_safe_length (cfg->x_basic_block_info);
6665  if ((unsigned) cfg->x_last_basic_block >= old_len)
6666    {
6667      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6668      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6669    }
6670
6671  (*cfg->x_basic_block_info)[bb->index] = bb;
6672
6673  /* Remap the variables in phi nodes.  */
6674  for (gphi_iterator psi = gsi_start_phis (bb);
6675       !gsi_end_p (psi); )
6676    {
6677      gphi *phi = psi.phi ();
6678      use_operand_p use;
6679      tree op = PHI_RESULT (phi);
6680      ssa_op_iter oi;
6681      unsigned i;
6682
6683      if (virtual_operand_p (op))
6684	{
6685	  /* Remove the phi nodes for virtual operands (alias analysis will be
6686	     run for the new function, anyway).  */
6687          remove_phi_node (&psi, true);
6688	  continue;
6689	}
6690
6691      SET_PHI_RESULT (phi,
6692		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6693      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6694	{
6695	  op = USE_FROM_PTR (use);
6696	  if (TREE_CODE (op) == SSA_NAME)
6697	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6698	}
6699
6700      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6701	{
6702	  location_t locus = gimple_phi_arg_location (phi, i);
6703	  tree block = LOCATION_BLOCK (locus);
6704
6705	  if (locus == UNKNOWN_LOCATION)
6706	    continue;
6707	  if (d->orig_block == NULL_TREE || block == d->orig_block)
6708	    {
6709	      if (d->new_block == NULL_TREE)
6710		locus = LOCATION_LOCUS (locus);
6711	      else
6712		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6713	      gimple_phi_arg_set_location (phi, i, locus);
6714	    }
6715	}
6716
6717      gsi_next (&psi);
6718    }
6719
6720  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6721    {
6722      gimple stmt = gsi_stmt (si);
6723      struct walk_stmt_info wi;
6724
6725      memset (&wi, 0, sizeof (wi));
6726      wi.info = d;
6727      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6728
6729      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
6730	{
6731	  tree label = gimple_label_label (label_stmt);
6732	  int uid = LABEL_DECL_UID (label);
6733
6734	  gcc_assert (uid > -1);
6735
6736	  old_len = vec_safe_length (cfg->x_label_to_block_map);
6737	  if (old_len <= (unsigned) uid)
6738	    {
6739	      new_len = 3 * uid / 2 + 1;
6740	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6741	    }
6742
6743	  (*cfg->x_label_to_block_map)[uid] = bb;
6744	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6745
6746	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6747
6748	  if (uid >= dest_cfun->cfg->last_label_uid)
6749	    dest_cfun->cfg->last_label_uid = uid + 1;
6750	}
6751
6752      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6753      remove_stmt_from_eh_lp_fn (cfun, stmt);
6754
6755      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6756      gimple_remove_stmt_histograms (cfun, stmt);
6757
6758      /* We cannot leave any operands allocated from the operand caches of
6759	 the current function.  */
6760      free_stmt_operands (cfun, stmt);
6761      push_cfun (dest_cfun);
6762      update_stmt (stmt);
6763      pop_cfun ();
6764    }
6765
6766  FOR_EACH_EDGE (e, ei, bb->succs)
6767    if (e->goto_locus != UNKNOWN_LOCATION)
6768      {
6769	tree block = LOCATION_BLOCK (e->goto_locus);
6770	if (d->orig_block == NULL_TREE
6771	    || block == d->orig_block)
6772	  e->goto_locus = d->new_block ?
6773	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6774	      LOCATION_LOCUS (e->goto_locus);
6775      }
6776}
6777
6778/* Examine the statements in BB (which is in SRC_CFUN); find and return
6779   the outermost EH region.  Use REGION as the incoming base EH region.  */
6780
6781static eh_region
6782find_outermost_region_in_block (struct function *src_cfun,
6783				basic_block bb, eh_region region)
6784{
6785  gimple_stmt_iterator si;
6786
6787  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6788    {
6789      gimple stmt = gsi_stmt (si);
6790      eh_region stmt_region;
6791      int lp_nr;
6792
6793      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6794      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6795      if (stmt_region)
6796	{
6797	  if (region == NULL)
6798	    region = stmt_region;
6799	  else if (stmt_region != region)
6800	    {
6801	      region = eh_region_outermost (src_cfun, stmt_region, region);
6802	      gcc_assert (region != NULL);
6803	    }
6804	}
6805    }
6806
6807  return region;
6808}
6809
6810static tree
6811new_label_mapper (tree decl, void *data)
6812{
6813  htab_t hash = (htab_t) data;
6814  struct tree_map *m;
6815  void **slot;
6816
6817  gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6818
6819  m = XNEW (struct tree_map);
6820  m->hash = DECL_UID (decl);
6821  m->base.from = decl;
6822  m->to = create_artificial_label (UNKNOWN_LOCATION);
6823  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6824  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6825    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6826
6827  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6828  gcc_assert (*slot == NULL);
6829
6830  *slot = m;
6831
6832  return m->to;
6833}
6834
6835/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6836   subblocks.  */
6837
6838static void
6839replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
6840				  tree to_context)
6841{
6842  tree *tp, t;
6843
6844  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6845    {
6846      t = *tp;
6847      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6848	continue;
6849      replace_by_duplicate_decl (&t, vars_map, to_context);
6850      if (t != *tp)
6851	{
6852	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6853	    {
6854	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6855	      DECL_HAS_VALUE_EXPR_P (t) = 1;
6856	    }
6857	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
6858	  *tp = t;
6859	}
6860    }
6861
6862  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6863    replace_block_vars_by_duplicates (block, vars_map, to_context);
6864}
6865
6866/* Fixup the loop arrays and numbers after moving LOOP and its subloops
6867   from FN1 to FN2.  */
6868
6869static void
6870fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6871			      struct loop *loop)
6872{
6873  /* Discard it from the old loop array.  */
6874  (*get_loops (fn1))[loop->num] = NULL;
6875
6876  /* Place it in the new loop array, assigning it a new number.  */
6877  loop->num = number_of_loops (fn2);
6878  vec_safe_push (loops_for_fn (fn2)->larray, loop);
6879
6880  /* Recurse to children.  */
6881  for (loop = loop->inner; loop; loop = loop->next)
6882    fixup_loop_arrays_after_move (fn1, fn2, loop);
6883}
6884
6885/* Verify that the blocks in BBS_P are a single-entry, single-exit region
6886   delimited by ENTRY and EXIT, possibly containing noreturn blocks.  */
6887
6888DEBUG_FUNCTION void
6889verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
6890{
6891  basic_block bb;
6892  edge_iterator ei;
6893  edge e;
6894  bitmap bbs = BITMAP_ALLOC (NULL);
6895  int i;
6896
6897  gcc_assert (entry != NULL);
6898  gcc_assert (entry != exit);
6899  gcc_assert (bbs_p != NULL);
6900
6901  gcc_assert (bbs_p->length () > 0);
6902
6903  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6904    bitmap_set_bit (bbs, bb->index);
6905
6906  gcc_assert (bitmap_bit_p (bbs, entry->index));
6907  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
6908
6909  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
6910    {
6911      if (bb == entry)
6912	{
6913	  gcc_assert (single_pred_p (entry));
6914	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
6915	}
6916      else
6917	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
6918	  {
6919	    e = ei_edge (ei);
6920	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
6921	  }
6922
6923      if (bb == exit)
6924	{
6925	  gcc_assert (single_succ_p (exit));
6926	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
6927	}
6928      else
6929	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
6930	  {
6931	    e = ei_edge (ei);
6932	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
6933	  }
6934    }
6935
6936  BITMAP_FREE (bbs);
6937}
6938
6939
6940/* Move a single-entry, single-exit region delimited by ENTRY_BB and
6941   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
6942   single basic block in the original CFG and the new basic block is
6943   returned.  DEST_CFUN must not have a CFG yet.
6944
6945   Note that the region need not be a pure SESE region.  Blocks inside
6946   the region may contain calls to abort/exit.  The only restriction
6947   is that ENTRY_BB should be the only entry point and it must
6948   dominate EXIT_BB.
6949
6950   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6951   function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6952   to the new function.
6953
6954   All local variables referenced in the region are assumed to be in
6955   the corresponding BLOCK_VARS and unexpanded variable lists
6956   associated with DEST_CFUN.  */
6957
6958basic_block
6959move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6960		        basic_block exit_bb, tree orig_block)
6961{
6962  vec<basic_block> bbs, dom_bbs;
6963  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6964  basic_block after, bb, *entry_pred, *exit_succ, abb;
6965  struct function *saved_cfun = cfun;
6966  int *entry_flag, *exit_flag;
6967  unsigned *entry_prob, *exit_prob;
6968  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6969  edge e;
6970  edge_iterator ei;
6971  htab_t new_label_map;
6972  hash_map<void *, void *> *eh_map;
6973  struct loop *loop = entry_bb->loop_father;
6974  struct loop *loop0 = get_loop (saved_cfun, 0);
6975  struct move_stmt_d d;
6976
6977  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6978     region.  */
6979  gcc_assert (entry_bb != exit_bb
6980              && (!exit_bb
6981		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6982
6983  /* Collect all the blocks in the region.  Manually add ENTRY_BB
6984     because it won't be added by dfs_enumerate_from.  */
6985  bbs.create (0);
6986  bbs.safe_push (entry_bb);
6987  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6988#ifdef ENABLE_CHECKING
6989  verify_sese (entry_bb, exit_bb, &bbs);
6990#endif
6991
6992  /* The blocks that used to be dominated by something in BBS will now be
6993     dominated by the new block.  */
6994  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6995				     bbs.address (),
6996				     bbs.length ());
6997
6998  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
6999     the predecessor edges to ENTRY_BB and the successor edges to
7000     EXIT_BB so that we can re-attach them to the new basic block that
7001     will replace the region.  */
7002  num_entry_edges = EDGE_COUNT (entry_bb->preds);
7003  entry_pred = XNEWVEC (basic_block, num_entry_edges);
7004  entry_flag = XNEWVEC (int, num_entry_edges);
7005  entry_prob = XNEWVEC (unsigned, num_entry_edges);
7006  i = 0;
7007  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7008    {
7009      entry_prob[i] = e->probability;
7010      entry_flag[i] = e->flags;
7011      entry_pred[i++] = e->src;
7012      remove_edge (e);
7013    }
7014
7015  if (exit_bb)
7016    {
7017      num_exit_edges = EDGE_COUNT (exit_bb->succs);
7018      exit_succ = XNEWVEC (basic_block, num_exit_edges);
7019      exit_flag = XNEWVEC (int, num_exit_edges);
7020      exit_prob = XNEWVEC (unsigned, num_exit_edges);
7021      i = 0;
7022      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7023	{
7024	  exit_prob[i] = e->probability;
7025	  exit_flag[i] = e->flags;
7026	  exit_succ[i++] = e->dest;
7027	  remove_edge (e);
7028	}
7029    }
7030  else
7031    {
7032      num_exit_edges = 0;
7033      exit_succ = NULL;
7034      exit_flag = NULL;
7035      exit_prob = NULL;
7036    }
7037
7038  /* Switch context to the child function to initialize DEST_FN's CFG.  */
7039  gcc_assert (dest_cfun->cfg == NULL);
7040  push_cfun (dest_cfun);
7041
7042  init_empty_tree_cfg ();
7043
7044  /* Initialize EH information for the new function.  */
7045  eh_map = NULL;
7046  new_label_map = NULL;
7047  if (saved_cfun->eh)
7048    {
7049      eh_region region = NULL;
7050
7051      FOR_EACH_VEC_ELT (bbs, i, bb)
7052	region = find_outermost_region_in_block (saved_cfun, bb, region);
7053
7054      init_eh_for_function ();
7055      if (region != NULL)
7056	{
7057	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7058	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7059					 new_label_mapper, new_label_map);
7060	}
7061    }
7062
7063  /* Initialize an empty loop tree.  */
7064  struct loops *loops = ggc_cleared_alloc<struct loops> ();
7065  init_loops_structure (dest_cfun, loops, 1);
7066  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7067  set_loops_for_fn (dest_cfun, loops);
7068
7069  /* Move the outlined loop tree part.  */
7070  num_nodes = bbs.length ();
7071  FOR_EACH_VEC_ELT (bbs, i, bb)
7072    {
7073      if (bb->loop_father->header == bb)
7074	{
7075	  struct loop *this_loop = bb->loop_father;
7076	  struct loop *outer = loop_outer (this_loop);
7077	  if (outer == loop
7078	      /* If the SESE region contains some bbs ending with
7079		 a noreturn call, those are considered to belong
7080		 to the outermost loop in saved_cfun, rather than
7081		 the entry_bb's loop_father.  */
7082	      || outer == loop0)
7083	    {
7084	      if (outer != loop)
7085		num_nodes -= this_loop->num_nodes;
7086	      flow_loop_tree_node_remove (bb->loop_father);
7087	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7088	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7089	    }
7090	}
7091      else if (bb->loop_father == loop0 && loop0 != loop)
7092	num_nodes--;
7093
7094      /* Remove loop exits from the outlined region.  */
7095      if (loops_for_fn (saved_cfun)->exits)
7096	FOR_EACH_EDGE (e, ei, bb->succs)
7097	  {
7098	    struct loops *l = loops_for_fn (saved_cfun);
7099	    loop_exit **slot
7100	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7101					       NO_INSERT);
7102	    if (slot)
7103	      l->exits->clear_slot (slot);
7104	  }
7105    }
7106
7107
7108  /* Adjust the number of blocks in the tree root of the outlined part.  */
7109  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7110
7111  /* Setup a mapping to be used by move_block_to_fn.  */
7112  loop->aux = current_loops->tree_root;
7113  loop0->aux = current_loops->tree_root;
7114
7115  pop_cfun ();
7116
7117  /* Move blocks from BBS into DEST_CFUN.  */
7118  gcc_assert (bbs.length () >= 2);
7119  after = dest_cfun->cfg->x_entry_block_ptr;
7120  hash_map<tree, tree> vars_map;
7121
7122  memset (&d, 0, sizeof (d));
7123  d.orig_block = orig_block;
7124  d.new_block = DECL_INITIAL (dest_cfun->decl);
7125  d.from_context = cfun->decl;
7126  d.to_context = dest_cfun->decl;
7127  d.vars_map = &vars_map;
7128  d.new_label_map = new_label_map;
7129  d.eh_map = eh_map;
7130  d.remap_decls_p = true;
7131
7132  FOR_EACH_VEC_ELT (bbs, i, bb)
7133    {
7134      /* No need to update edge counts on the last block.  It has
7135	 already been updated earlier when we detached the region from
7136	 the original CFG.  */
7137      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7138      after = bb;
7139    }
7140
7141  loop->aux = NULL;
7142  loop0->aux = NULL;
7143  /* Loop sizes are no longer correct, fix them up.  */
7144  loop->num_nodes -= num_nodes;
7145  for (struct loop *outer = loop_outer (loop);
7146       outer; outer = loop_outer (outer))
7147    outer->num_nodes -= num_nodes;
7148  loop0->num_nodes -= bbs.length () - num_nodes;
7149
7150  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7151    {
7152      struct loop *aloop;
7153      for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7154	if (aloop != NULL)
7155	  {
7156	    if (aloop->simduid)
7157	      {
7158		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7159					   d.to_context);
7160		dest_cfun->has_simduid_loops = true;
7161	      }
7162	    if (aloop->force_vectorize)
7163	      dest_cfun->has_force_vectorize_loops = true;
7164	  }
7165    }
7166
7167  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
7168  if (orig_block)
7169    {
7170      tree block;
7171      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7172		  == NULL_TREE);
7173      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7174	= BLOCK_SUBBLOCKS (orig_block);
7175      for (block = BLOCK_SUBBLOCKS (orig_block);
7176	   block; block = BLOCK_CHAIN (block))
7177	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7178      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7179    }
7180
7181  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7182				    &vars_map, dest_cfun->decl);
7183
7184  if (new_label_map)
7185    htab_delete (new_label_map);
7186  if (eh_map)
7187    delete eh_map;
7188
7189  /* Rewire the entry and exit blocks.  The successor to the entry
7190     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7191     the child function.  Similarly, the predecessor of DEST_FN's
7192     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
7193     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7194     various CFG manipulation functions get to the right CFG.
7195
7196     FIXME, this is silly.  The CFG ought to become a parameter to
7197     these helpers.  */
7198  push_cfun (dest_cfun);
7199  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7200  if (exit_bb)
7201    make_edge (exit_bb,  EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7202  pop_cfun ();
7203
7204  /* Back in the original function, the SESE region has disappeared,
7205     create a new basic block in its place.  */
7206  bb = create_empty_bb (entry_pred[0]);
7207  if (current_loops)
7208    add_bb_to_loop (bb, loop);
7209  for (i = 0; i < num_entry_edges; i++)
7210    {
7211      e = make_edge (entry_pred[i], bb, entry_flag[i]);
7212      e->probability = entry_prob[i];
7213    }
7214
7215  for (i = 0; i < num_exit_edges; i++)
7216    {
7217      e = make_edge (bb, exit_succ[i], exit_flag[i]);
7218      e->probability = exit_prob[i];
7219    }
7220
7221  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7222  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7223    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7224  dom_bbs.release ();
7225
7226  if (exit_bb)
7227    {
7228      free (exit_prob);
7229      free (exit_flag);
7230      free (exit_succ);
7231    }
7232  free (entry_prob);
7233  free (entry_flag);
7234  free (entry_pred);
7235  bbs.release ();
7236
7237  return bb;
7238}
7239
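/* As a point of reference, the main consumer of move_sese_region_to_fn
   is OMP expansion (see omp-low.c), which outlines the body of a
   parallel or task region into a child function; in the parent, the
   whole region collapses to the single block returned above, where the
   call to the child function is then emitted.  */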
7240
7241/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
7242   dumpfile.h).  */
7243
7244void
7245dump_function_to_file (tree fndecl, FILE *file, int flags)
7246{
7247  tree arg, var, old_current_fndecl = current_function_decl;
7248  struct function *dsf;
7249  bool ignore_topmost_bind = false, any_var = false;
7250  basic_block bb;
7251  tree chain;
7252  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7253		  && decl_is_tm_clone (fndecl));
7254  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7255
7256  current_function_decl = fndecl;
7257  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7258
7259  arg = DECL_ARGUMENTS (fndecl);
7260  while (arg)
7261    {
7262      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7263      fprintf (file, " ");
7264      print_generic_expr (file, arg, dump_flags);
7265      if (flags & TDF_VERBOSE)
7266	print_node (file, "", arg, 4);
7267      if (DECL_CHAIN (arg))
7268	fprintf (file, ", ");
7269      arg = DECL_CHAIN (arg);
7270    }
7271  fprintf (file, ")\n");
7272
7273  if (flags & TDF_VERBOSE)
7274    print_node (file, "", fndecl, 2);
7275
7276  dsf = DECL_STRUCT_FUNCTION (fndecl);
7277  if (dsf && (flags & TDF_EH))
7278    dump_eh_tree (file, dsf);
7279
7280  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7281    {
7282      dump_node (fndecl, TDF_SLIM | flags, file);
7283      current_function_decl = old_current_fndecl;
7284      return;
7285    }
7286
7287  /* When GIMPLE is lowered, the variables are no longer available in
7288     BIND_EXPRs, so display them separately.  */
7289  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7290    {
7291      unsigned ix;
7292      ignore_topmost_bind = true;
7293
7294      fprintf (file, "{\n");
7295      if (!vec_safe_is_empty (fun->local_decls))
7296	FOR_EACH_LOCAL_DECL (fun, ix, var)
7297	  {
7298	    print_generic_decl (file, var, flags);
7299	    if (flags & TDF_VERBOSE)
7300	      print_node (file, "", var, 4);
7301	    fprintf (file, "\n");
7302
7303	    any_var = true;
7304	  }
7305      if (gimple_in_ssa_p (cfun))
7306	for (ix = 1; ix < num_ssa_names; ++ix)
7307	  {
7308	    tree name = ssa_name (ix);
7309	    if (name && !SSA_NAME_VAR (name))
7310	      {
7311		fprintf (file, "  ");
7312		print_generic_expr (file, TREE_TYPE (name), flags);
7313		fprintf (file, " ");
7314		print_generic_expr (file, name, flags);
7315		fprintf (file, ";\n");
7316
7317		any_var = true;
7318	      }
7319	  }
7320    }
7321
7322  if (fun && fun->decl == fndecl
7323      && fun->cfg
7324      && basic_block_info_for_fn (fun))
7325    {
7326      /* If the CFG has been built, emit a CFG-based dump.  */
7327      if (!ignore_topmost_bind)
7328	fprintf (file, "{\n");
7329
7330      if (any_var && n_basic_blocks_for_fn (fun))
7331	fprintf (file, "\n");
7332
7333      FOR_EACH_BB_FN (bb, fun)
7334	dump_bb (file, bb, 2, flags | TDF_COMMENT);
7335
7336      fprintf (file, "}\n");
7337    }
7338  else if (DECL_SAVED_TREE (fndecl) == NULL)
7339    {
7340      /* The function is now in GIMPLE form but the CFG has not been
7341	 built yet.  Emit the single sequence of GIMPLE statements
7342	 that make up its body.  */
7343      gimple_seq body = gimple_body (fndecl);
7344
7345      if (gimple_seq_first_stmt (body)
7346	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7347	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7348	print_gimple_seq (file, body, 0, flags);
7349      else
7350	{
7351	  if (!ignore_topmost_bind)
7352	    fprintf (file, "{\n");
7353
7354	  if (any_var)
7355	    fprintf (file, "\n");
7356
7357	  print_gimple_seq (file, body, 2, flags);
7358	  fprintf (file, "}\n");
7359	}
7360    }
7361  else
7362    {
7363      int indent;
7364
7365      /* Make a tree based dump.  */
7366      chain = DECL_SAVED_TREE (fndecl);
7367      if (chain && TREE_CODE (chain) == BIND_EXPR)
7368	{
7369	  if (ignore_topmost_bind)
7370	    {
7371	      chain = BIND_EXPR_BODY (chain);
7372	      indent = 2;
7373	    }
7374	  else
7375	    indent = 0;
7376	}
7377      else
7378	{
7379	  if (!ignore_topmost_bind)
7380	    fprintf (file, "{\n");
7381	  indent = 2;
7382	}
7383
7384      if (any_var)
7385	fprintf (file, "\n");
7386
7387      print_generic_stmt_indented (file, chain, flags, indent);
7388      if (ignore_topmost_bind)
7389	fprintf (file, "}\n");
7390    }
7391
7392  if (flags & TDF_ENUMERATE_LOCALS)
7393    dump_enumerated_decls (file, flags);
7394  fprintf (file, "\n\n");
7395
7396  current_function_decl = old_current_fndecl;
7397}
7398
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h).  */
7400
7401DEBUG_FUNCTION void
7402debug_function (tree fn, int flags)
7403{
7404  dump_function_to_file (fn, stderr, flags);
7405}
7406
7407
7408/* Print on FILE the indexes for the predecessors of basic_block BB.  */
7409
7410static void
7411print_pred_bbs (FILE *file, basic_block bb)
7412{
7413  edge e;
7414  edge_iterator ei;
7415
7416  FOR_EACH_EDGE (e, ei, bb->preds)
7417    fprintf (file, "bb_%d ", e->src->index);
7418}
7419
7420
7421/* Print on FILE the indexes for the successors of basic_block BB.  */
7422
7423static void
7424print_succ_bbs (FILE *file, basic_block bb)
7425{
7426  edge e;
7427  edge_iterator ei;
7428
7429  FOR_EACH_EDGE (e, ei, bb->succs)
7430    fprintf (file, "bb_%d ", e->dest->index);
7431}
7432
/* Print to FILE the basic block BB, according to the VERBOSITY level.  */
7434
7435void
7436print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7437{
7438  char *s_indent = (char *) alloca ((size_t) indent + 1);
7439  memset ((void *) s_indent, ' ', (size_t) indent);
7440  s_indent[indent] = '\0';
7441
7442  /* Print basic_block's header.  */
7443  if (verbosity >= 2)
7444    {
7445      fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
7446      print_pred_bbs (file, bb);
7447      fprintf (file, "}, succs = {");
7448      print_succ_bbs (file, bb);
7449      fprintf (file, "})\n");
7450    }
7451
7452  /* Print basic_block's body.  */
7453  if (verbosity >= 3)
7454    {
7455      fprintf (file, "%s  {\n", s_indent);
7456      dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7457      fprintf (file, "%s  }\n", s_indent);
7458    }
7459}
7460
7461static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7462
/* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
   the VERBOSITY level, this outputs the contents of the loop, or
   just its structure.  */
7466
7467static void
7468print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7469{
7470  char *s_indent;
7471  basic_block bb;
7472
7473  if (loop == NULL)
7474    return;
7475
7476  s_indent = (char *) alloca ((size_t) indent + 1);
7477  memset ((void *) s_indent, ' ', (size_t) indent);
7478  s_indent[indent] = '\0';
7479
7480  /* Print loop's header.  */
7481  fprintf (file, "%sloop_%d (", s_indent, loop->num);
7482  if (loop->header)
7483    fprintf (file, "header = %d", loop->header->index);
7484  else
7485    {
7486      fprintf (file, "deleted)\n");
7487      return;
7488    }
7489  if (loop->latch)
7490    fprintf (file, ", latch = %d", loop->latch->index);
7491  else
7492    fprintf (file, ", multiple latches");
7493  fprintf (file, ", niter = ");
7494  print_generic_expr (file, loop->nb_iterations, 0);
7495
7496  if (loop->any_upper_bound)
7497    {
7498      fprintf (file, ", upper_bound = ");
7499      print_decu (loop->nb_iterations_upper_bound, file);
7500    }
7501
7502  if (loop->any_estimate)
7503    {
7504      fprintf (file, ", estimate = ");
7505      print_decu (loop->nb_iterations_estimate, file);
7506    }
7507  fprintf (file, ")\n");
7508
7509  /* Print loop's body.  */
7510  if (verbosity >= 1)
7511    {
7512      fprintf (file, "%s{\n", s_indent);
7513      FOR_EACH_BB_FN (bb, cfun)
7514	if (bb->loop_father == loop)
7515	  print_loops_bb (file, bb, indent, verbosity);
7516
7517      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7518      fprintf (file, "%s}\n", s_indent);
7519    }
7520}
7521
/* Print the LOOP and its sibling loops on FILE, indented INDENT
   spaces.  Depending on the VERBOSITY level, this outputs the
   contents of the loop, or just its structure.  */
7525
7526static void
7527print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7528			 int verbosity)
7529{
7530  if (loop == NULL)
7531    return;
7532
7533  print_loop (file, loop, indent, verbosity);
7534  print_loop_and_siblings (file, loop->next, indent, verbosity);
7535}
7536
/* Follow a CFG edge from the entry point of the function, and on
   entry of a loop, pretty print the loop structure on FILE.  */
7539
7540void
7541print_loops (FILE *file, int verbosity)
7542{
7543  basic_block bb;
7544
7545  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7546  if (bb && bb->loop_father)
7547    print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7548}
7549
7550/* Dump a loop.  */
7551
7552DEBUG_FUNCTION void
7553debug (struct loop &ref)
7554{
7555  print_loop (stderr, &ref, 0, /*verbosity*/0);
7556}
7557
7558DEBUG_FUNCTION void
7559debug (struct loop *ptr)
7560{
7561  if (ptr)
7562    debug (*ptr);
7563  else
7564    fprintf (stderr, "<nil>\n");
7565}
7566
7567/* Dump a loop verbosely.  */
7568
7569DEBUG_FUNCTION void
7570debug_verbose (struct loop &ref)
7571{
7572  print_loop (stderr, &ref, 0, /*verbosity*/3);
7573}
7574
7575DEBUG_FUNCTION void
7576debug_verbose (struct loop *ptr)
7577{
7578  if (ptr)
    debug_verbose (*ptr);
7580  else
7581    fprintf (stderr, "<nil>\n");
7582}
7583
7584
/* Debug the loop structure at tree level, at some VERBOSITY level.  */
7586
7587DEBUG_FUNCTION void
7588debug_loops (int verbosity)
7589{
7590  print_loops (stderr, verbosity);
7591}
7592
7593/* Print on stderr the code of LOOP, at some VERBOSITY level.  */
7594
7595DEBUG_FUNCTION void
7596debug_loop (struct loop *loop, int verbosity)
7597{
7598  print_loop (stderr, loop, 0, verbosity);
7599}
7600
7601/* Print on stderr the code of loop number NUM, at some VERBOSITY
7602   level.  */
7603
7604DEBUG_FUNCTION void
7605debug_loop_num (unsigned num, int verbosity)
7606{
7607  debug_loop (get_loop (cfun, num), verbosity);
7608}
7609
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false
   otherwise.  */
7613
7614static bool
7615gimple_block_ends_with_call_p (basic_block bb)
7616{
7617  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7618  return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7619}
7620
7621
/* Return true if BB ends with a conditional branch.  Return false
   otherwise.  */
7624
7625static bool
7626gimple_block_ends_with_condjump_p (const_basic_block bb)
7627{
7628  gimple stmt = last_stmt (CONST_CAST_BB (bb));
7629  return (stmt && gimple_code (stmt) == GIMPLE_COND);
7630}
7631
7632
7633/* Return true if we need to add fake edge to exit at statement T.
7634   Helper function for gimple_flow_call_edges_add.  */
7635
7636static bool
7637need_fake_edge_p (gimple t)
7638{
7639  tree fndecl = NULL_TREE;
7640  int call_flags = 0;
7641
7642  /* NORETURN and LONGJMP calls already have an edge to exit.
7643     CONST and PURE calls do not need one.
7644     We don't currently check for CONST and PURE here, although
7645     it would be a good idea, because those attributes are
7646     figured out from the RTL in mark_constant_function, and
7647     the counter incrementation code from -fprofile-arcs
7648     leads to different results from -fbranch-probabilities.  */
7649  if (is_gimple_call (t))
7650    {
7651      fndecl = gimple_call_fndecl (t);
7652      call_flags = gimple_call_flags (t);
7653    }
7654
7655  if (is_gimple_call (t)
7656      && fndecl
7657      && DECL_BUILT_IN (fndecl)
7658      && (call_flags & ECF_NOTHROW)
7659      && !(call_flags & ECF_RETURNS_TWICE)
7660      /* fork() doesn't really return twice, but the effect of
7661         wrapping it in __gcov_fork() which calls __gcov_flush()
7662	 and clears the counters before forking has the same
7663	 effect as returning twice.  Force a fake edge.  */
7664      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7665	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7666    return false;
7667
7668  if (is_gimple_call (t))
7669    {
7670      edge_iterator ei;
7671      edge e;
7672      basic_block bb;
7673
7674      if (!(call_flags & ECF_NORETURN))
7675	return true;
7676
7677      bb = gimple_bb (t);
7678      FOR_EACH_EDGE (e, ei, bb->succs)
7679	if ((e->flags & EDGE_FAKE) == 0)
7680	  return true;
7681    }
7682
7683  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7684    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7685      return true;
7686
7687  return false;
7688}
7689
7690
/* Add fake edges to the function exit for any non-constant and
   non-noreturn calls (or noreturn calls with EH/abnormal edges) and
   for volatile inline assembly, in the bitmap of blocks specified by
   BLOCKS or in the whole CFG if BLOCKS is zero.  Return the number of
   blocks that were split.
7696
7697   The goal is to expose cases in which entering a basic block does
7698   not imply that all subsequent instructions must be executed.  */
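
/* For instance (an illustrative GIMPLE fragment, not from this file),
   given a block

     x = foo ();
     y = x + 1;

   where foo may not return (say it can call exit), a fake edge from
   the block to EXIT records that execution may leave the block at the
   call, so entering the block does not guarantee that the addition
   runs.  */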
7699
7700static int
7701gimple_flow_call_edges_add (sbitmap blocks)
7702{
7703  int i;
7704  int blocks_split = 0;
7705  int last_bb = last_basic_block_for_fn (cfun);
7706  bool check_last_block = false;
7707
7708  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7709    return 0;
7710
7711  if (! blocks)
7712    check_last_block = true;
7713  else
7714    check_last_block = bitmap_bit_p (blocks,
7715				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7716
7717  /* In the last basic block, before epilogue generation, there will be
7718     a fallthru edge to EXIT.  Special care is required if the last insn
7719     of the last basic block is a call because make_edge folds duplicate
7720     edges, which would result in the fallthru edge also being marked
7721     fake, which would result in the fallthru edge being removed by
7722     remove_fake_edges, which would result in an invalid CFG.
7723
7724     Moreover, we can't elide the outgoing fake edge, since the block
7725     profiler needs to take this into account in order to solve the minimal
7726     spanning tree in the case that the call doesn't return.
7727
7728     Handle this by adding a dummy instruction in a new last basic block.  */
7729  if (check_last_block)
7730    {
7731      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7732      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7733      gimple t = NULL;
7734
7735      if (!gsi_end_p (gsi))
7736	t = gsi_stmt (gsi);
7737
7738      if (t && need_fake_edge_p (t))
7739	{
7740	  edge e;
7741
7742	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7743	  if (e)
7744	    {
7745	      gsi_insert_on_edge (e, gimple_build_nop ());
7746	      gsi_commit_edge_inserts ();
7747	    }
7748	}
7749    }
7750
  /* Now add fake edges to the function exit for any non-constant
     calls since there is no way that we can determine if they will
     return or not...  */
7754  for (i = 0; i < last_bb; i++)
7755    {
7756      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7757      gimple_stmt_iterator gsi;
7758      gimple stmt, last_stmt;
7759
7760      if (!bb)
7761	continue;
7762
7763      if (blocks && !bitmap_bit_p (blocks, i))
7764	continue;
7765
7766      gsi = gsi_last_nondebug_bb (bb);
7767      if (!gsi_end_p (gsi))
7768	{
7769	  last_stmt = gsi_stmt (gsi);
7770	  do
7771	    {
7772	      stmt = gsi_stmt (gsi);
7773	      if (need_fake_edge_p (stmt))
7774		{
7775		  edge e;
7776
7777		  /* The handling above of the final block before the
7778		     epilogue should be enough to verify that there is
7779		     no edge to the exit block in CFG already.
7780		     Calling make_edge in such case would cause us to
7781		     mark that edge as fake and remove it later.  */
7782#ifdef ENABLE_CHECKING
7783		  if (stmt == last_stmt)
7784		    {
7785		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7786		      gcc_assert (e == NULL);
7787		    }
7788#endif
7789
7790		  /* Note that the following may create a new basic block
7791		     and renumber the existing basic blocks.  */
7792		  if (stmt != last_stmt)
7793		    {
7794		      e = split_block (bb, stmt);
7795		      if (e)
7796			blocks_split++;
7797		    }
7798		  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7799		}
7800	      gsi_prev (&gsi);
7801	    }
7802	  while (!gsi_end_p (gsi));
7803	}
7804    }
7805
7806  if (blocks_split)
7807    verify_flow_info ();
7808
7809  return blocks_split;
7810}
7811
7812/* Removes edge E and all the blocks dominated by it, and updates dominance
7813   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */
7815
7816void
7817remove_edge_and_dominated_blocks (edge e)
7818{
7819  vec<basic_block> bbs_to_remove = vNULL;
7820  vec<basic_block> bbs_to_fix_dom = vNULL;
7821  bitmap df, df_idom;
7822  edge f;
7823  edge_iterator ei;
7824  bool none_removed = false;
7825  unsigned i;
7826  basic_block bb, dbb;
7827  bitmap_iterator bi;
7828
  /* Removing a path inside a non-root loop may change loop ownership
     of blocks or remove loops.  Mark loops for fixup.  */
7831  if (current_loops
7832      && loop_outer (e->src->loop_father) != NULL
7833      && e->src->loop_father == e->dest->loop_father)
7834    loops_state_set (LOOPS_NEED_FIXUP);
7835
7836  if (!dom_info_available_p (CDI_DOMINATORS))
7837    {
7838      remove_edge (e);
7839      return;
7840    }
7841
7842  /* No updating is needed for edges to exit.  */
7843  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7844    {
7845      if (cfgcleanup_altered_bbs)
7846	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7847      remove_edge (e);
7848      return;
7849    }
7850
7851  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
7852     that is not dominated by E->dest, then this set is empty.  Otherwise,
7853     all the basic blocks dominated by E->dest are removed.
7854
7855     Also, to DF_IDOM we store the immediate dominators of the blocks in
7856     the dominance frontier of E (i.e., of the successors of the
7857     removed blocks, if there are any, and of E->dest otherwise).  */
7858  FOR_EACH_EDGE (f, ei, e->dest->preds)
7859    {
7860      if (f == e)
7861	continue;
7862
7863      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7864	{
7865	  none_removed = true;
7866	  break;
7867	}
7868    }
7869
7870  df = BITMAP_ALLOC (NULL);
7871  df_idom = BITMAP_ALLOC (NULL);
7872
7873  if (none_removed)
7874    bitmap_set_bit (df_idom,
7875		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7876  else
7877    {
7878      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7879      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7880	{
7881	  FOR_EACH_EDGE (f, ei, bb->succs)
7882	    {
7883	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7884		bitmap_set_bit (df, f->dest->index);
7885	    }
7886	}
7887      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7888	bitmap_clear_bit (df, bb->index);
7889
7890      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7891	{
7892	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
7893	  bitmap_set_bit (df_idom,
7894			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7895	}
7896    }
7897
7898  if (cfgcleanup_altered_bbs)
7899    {
7900      /* Record the set of the altered basic blocks.  */
7901      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7902      bitmap_ior_into (cfgcleanup_altered_bbs, df);
7903    }
7904
7905  /* Remove E and the cancelled blocks.  */
7906  if (none_removed)
7907    remove_edge (e);
7908  else
7909    {
7910      /* Walk backwards so as to get a chance to substitute all
7911	 released DEFs into debug stmts.  See
7912	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7913	 details.  */
7914      for (i = bbs_to_remove.length (); i-- > 0; )
7915	delete_basic_block (bbs_to_remove[i]);
7916    }
7917
7918  /* Update the dominance information.  The immediate dominator may change only
7919     for blocks whose immediate dominator belongs to DF_IDOM:
7920
7921     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z be an arbitrary block such that idom(Z) = Y and
7923     Z dominates X after the removal.  Before removal, there exists a path P
7924     from Y to X that avoids Z.  Let F be the last edge on P that is
7925     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
7926     dominates W, and because of P, Z does not dominate W), and W belongs to
7927     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
7928  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7929    {
7930      bb = BASIC_BLOCK_FOR_FN (cfun, i);
7931      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7932	   dbb;
7933	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
7934	bbs_to_fix_dom.safe_push (dbb);
7935    }
7936
7937  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7938
7939  BITMAP_FREE (df);
7940  BITMAP_FREE (df_idom);
7941  bbs_to_remove.release ();
7942  bbs_to_fix_dom.release ();
7943}
7944
7945/* Purge dead EH edges from basic block BB.  */
7946
7947bool
7948gimple_purge_dead_eh_edges (basic_block bb)
7949{
7950  bool changed = false;
7951  edge e;
7952  edge_iterator ei;
7953  gimple stmt = last_stmt (bb);
7954
7955  if (stmt && stmt_can_throw_internal (stmt))
7956    return false;
7957
7958  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7959    {
7960      if (e->flags & EDGE_EH)
7961	{
7962	  remove_edge_and_dominated_blocks (e);
7963	  changed = true;
7964	}
7965      else
7966	ei_next (&ei);
7967    }
7968
7969  return changed;
7970}
7971
/* Purge dead EH edges from the basic blocks listed in BLOCKS.  */
7973
7974bool
7975gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7976{
7977  bool changed = false;
7978  unsigned i;
7979  bitmap_iterator bi;
7980
7981  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7982    {
7983      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7984
7985      /* Earlier gimple_purge_dead_eh_edges could have removed
7986	 this basic block already.  */
7987      gcc_assert (bb || changed);
7988      if (bb != NULL)
7989	changed |= gimple_purge_dead_eh_edges (bb);
7990    }
7991
7992  return changed;
7993}
7994
7995/* Purge dead abnormal call edges from basic block BB.  */
7996
7997bool
7998gimple_purge_dead_abnormal_call_edges (basic_block bb)
7999{
8000  bool changed = false;
8001  edge e;
8002  edge_iterator ei;
8003  gimple stmt = last_stmt (bb);
8004
8005  if (!cfun->has_nonlocal_label
8006      && !cfun->calls_setjmp)
8007    return false;
8008
8009  if (stmt && stmt_can_make_abnormal_goto (stmt))
8010    return false;
8011
8012  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8013    {
8014      if (e->flags & EDGE_ABNORMAL)
8015	{
8016	  if (e->flags & EDGE_FALLTHRU)
8017	    e->flags &= ~EDGE_ABNORMAL;
8018	  else
8019	    remove_edge_and_dominated_blocks (e);
8020	  changed = true;
8021	}
8022      else
8023	ei_next (&ei);
8024    }
8025
8026  return changed;
8027}
8028
/* Purge dead abnormal call edges from the basic blocks listed in BLOCKS.  */
8030
8031bool
8032gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8033{
8034  bool changed = false;
8035  unsigned i;
8036  bitmap_iterator bi;
8037
8038  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8039    {
8040      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8041
8042      /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8043	 this basic block already.  */
8044      gcc_assert (bb || changed);
8045      if (bb != NULL)
8046	changed |= gimple_purge_dead_abnormal_call_edges (bb);
8047    }
8048
8049  return changed;
8050}
8051
8052/* This function is called whenever a new edge is created or
8053   redirected.  */
8054
8055static void
8056gimple_execute_on_growing_pred (edge e)
8057{
8058  basic_block bb = e->dest;
8059
8060  if (!gimple_seq_empty_p (phi_nodes (bb)))
8061    reserve_phi_args_for_new_edge (bb);
8062}
8063
8064/* This function is called immediately before edge E is removed from
8065   the edge vector E->dest->preds.  */
8066
8067static void
8068gimple_execute_on_shrinking_pred (edge e)
8069{
8070  if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8071    remove_phi_args (e);
8072}
8073
8074/*---------------------------------------------------------------------------
8075  Helper functions for Loop versioning
8076  ---------------------------------------------------------------------------*/
8077
/* Adjust phi nodes for 'first' basic block.  'second' basic block is
   a copy of 'first'.  Both of them are dominated by 'new_head' basic
   block.  When 'new_head' was created by splitting 'second's incoming
   edge, it received phi arguments on that edge from split_edge().
   Later, an additional edge 'e' was created to connect 'new_head' and
   'first'.  Now this routine adds to this additional edge 'e' the phi
   args that the edge from 'new_head' to 'second' received as part of
   the edge splitting.  */
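
/* An illustrative sketch of the shape this routine handles (block and
   edge names as in the comment above):

	    new_head
	    /      \
	e  /        \  e2 = find_edge (new_head, second)
	  v          v
	first      second

   For each pair of corresponding phis, the argument that SECOND's phi
   carries for E2 is added to FIRST's phi for edge E.  */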
8085
8086static void
8087gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8088				  basic_block new_head, edge e)
8089{
8090  gphi *phi1, *phi2;
8091  gphi_iterator psi1, psi2;
8092  tree def;
8093  edge e2 = find_edge (new_head, second);
8094
8095  /* Because NEW_HEAD has been created by splitting SECOND's incoming
8096     edge, we should always have an edge from NEW_HEAD to SECOND.  */
8097  gcc_assert (e2 != NULL);
8098
  /* Walk all phi nodes of the 'second' basic block and add phi args to
     edge 'e' of the 'first' block.  PHI args are always in the correct
     order.  */
8101
8102  for (psi2 = gsi_start_phis (second),
8103       psi1 = gsi_start_phis (first);
8104       !gsi_end_p (psi2) && !gsi_end_p (psi1);
8105       gsi_next (&psi2),  gsi_next (&psi1))
8106    {
8107      phi1 = psi1.phi ();
8108      phi2 = psi2.phi ();
8109      def = PHI_ARG_DEF (phi2, e2->dest_idx);
8110      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8111    }
8112}
8113
8114
/* Add an if-else statement to COND_BB with condition COND_EXPR.
   SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
   the destination of the ELSE part.  */
8118
8119static void
8120gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8121			       basic_block second_head ATTRIBUTE_UNUSED,
8122			       basic_block cond_bb, void *cond_e)
8123{
8124  gimple_stmt_iterator gsi;
8125  gimple new_cond_expr;
8126  tree cond_expr = (tree) cond_e;
8127  edge e0;
8128
  /* Build the new conditional expression.  */
8130  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8131					       NULL_TREE, NULL_TREE);
8132
8133  /* Add new cond in cond_bb.  */
8134  gsi = gsi_last_bb (cond_bb);
8135  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8136
8137  /* Adjust edges appropriately to connect new head with first head
8138     as well as second head.  */
8139  e0 = single_succ_edge (cond_bb);
8140  e0->flags &= ~EDGE_FALLTHRU;
8141  e0->flags |= EDGE_FALSE_VALUE;
8142}
8143
8144
/* Do book-keeping of basic block BB for the profile consistency checker.
   If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
   do post-pass accounting.  Store the counts in RECORD.  */
8148static void
8149gimple_account_profile_record (basic_block bb, int after_pass,
8150			       struct profile_record *record)
8151{
8152  gimple_stmt_iterator i;
8153  for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8154    {
8155      record->size[after_pass]
8156	+= estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8157      if (profile_status_for_fn (cfun) == PROFILE_READ)
8158	record->time[after_pass]
8159	  += estimate_num_insns (gsi_stmt (i),
8160				 &eni_time_weights) * bb->count;
8161      else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8162	record->time[after_pass]
8163	  += estimate_num_insns (gsi_stmt (i),
8164				 &eni_time_weights) * bb->frequency;
8165    }
8166}
8167
8168struct cfg_hooks gimple_cfg_hooks = {
8169  "gimple",
8170  gimple_verify_flow_info,
8171  gimple_dump_bb,		/* dump_bb  */
8172  gimple_dump_bb_for_graph,	/* dump_bb_for_graph  */
8173  create_bb,			/* create_basic_block  */
8174  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
8175  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
8176  gimple_can_remove_branch_p,	/* can_remove_branch_p  */
8177  remove_bb,			/* delete_basic_block  */
8178  gimple_split_block,		/* split_block  */
8179  gimple_move_block_after,	/* move_block_after  */
8180  gimple_can_merge_blocks_p,	/* can_merge_blocks_p  */
8181  gimple_merge_blocks,		/* merge_blocks  */
8182  gimple_predict_edge,		/* predict_edge  */
8183  gimple_predicted_by_p,	/* predicted_by_p  */
8184  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p  */
8185  gimple_duplicate_bb,		/* duplicate_block  */
8186  gimple_split_edge,		/* split_edge  */
8187  gimple_make_forwarder_block,	/* make_forward_block  */
8188  NULL,				/* tidy_fallthru_edge  */
8189  NULL,				/* force_nonfallthru */
8190  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8191  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8192  gimple_flow_call_edges_add,   /* flow_call_edges_add */
8193  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
8194  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8195  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8196  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
8198  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8199  flush_pending_stmts, 		/* flush_pending_stmts */
8200  gimple_empty_block_p,           /* block_empty_p */
8201  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,	/* account_profile_record */
8203};
8204
8205
8206/* Split all critical edges.  */
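
/* An edge is critical when its source has more than one successor and
   its destination has more than one predecessor, so nothing can be
   inserted on the edge without affecting other paths.  For instance
   (block names hypothetical), with succs (A) = {C, D} and
   preds (D) = {A, B}, the edge A->D is critical; split_edge inserts a
   new block N on it, giving A->N->D.  */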
8207
8208unsigned int
8209split_critical_edges (void)
8210{
8211  basic_block bb;
8212  edge e;
8213  edge_iterator ei;
8214
8215  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8216     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
8217     mappings around the calls to split_edge.  */
8218  start_recording_case_labels ();
8219  FOR_ALL_BB_FN (bb, cfun)
8220    {
8221      FOR_EACH_EDGE (e, ei, bb->succs)
8222        {
8223	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8224	    split_edge (e);
	  /* PRE inserts statements on edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges whose source block ends with a
	     control flow statement, such as RESX, and whose destination
	     has multiple predecessors.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
8232	  else if ((!single_pred_p (e->dest)
8233	            || !gimple_seq_empty_p (phi_nodes (e->dest))
8234		    || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8235		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8236	           && !(e->flags & EDGE_ABNORMAL))
8237	    {
8238	      gimple_stmt_iterator gsi;
8239
8240	      gsi = gsi_last_bb (e->src);
8241	      if (!gsi_end_p (gsi)
8242		  && stmt_ends_bb_p (gsi_stmt (gsi))
8243		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8244		      && !gimple_call_builtin_p (gsi_stmt (gsi),
8245						 BUILT_IN_RETURN)))
8246		split_edge (e);
8247	    }
8248	}
8249    }
8250  end_recording_case_labels ();
8251  return 0;
8252}
8253
8254namespace {
8255
8256const pass_data pass_data_split_crit_edges =
8257{
8258  GIMPLE_PASS, /* type */
8259  "crited", /* name */
8260  OPTGROUP_NONE, /* optinfo_flags */
8261  TV_TREE_SPLIT_EDGES, /* tv_id */
8262  PROP_cfg, /* properties_required */
8263  PROP_no_crit_edges, /* properties_provided */
8264  0, /* properties_destroyed */
8265  0, /* todo_flags_start */
8266  0, /* todo_flags_finish */
8267};
8268
8269class pass_split_crit_edges : public gimple_opt_pass
8270{
8271public:
8272  pass_split_crit_edges (gcc::context *ctxt)
8273    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8274  {}
8275
8276  /* opt_pass methods: */
8277  virtual unsigned int execute (function *) { return split_critical_edges (); }
8278
8279  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8280}; // class pass_split_crit_edges
8281
8282} // anon namespace
8283
8284gimple_opt_pass *
8285make_pass_split_crit_edges (gcc::context *ctxt)
8286{
8287  return new pass_split_crit_edges (ctxt);
8288}
8289
8290
8291/* Insert COND expression which is GIMPLE_COND after STMT
8292   in basic block BB with appropriate basic block split
8293   and creation of a new conditionally executed basic block.
8294   Return created basic block.  */
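
/* An illustrative sketch of the resulting shape (BB is split after
   STMT; FALL is the fallthru edge returned by split_block):

	  BB: ...; STMT; COND
	  /true          \false
	 v                v
     NEW_BB ---------> FALL->dest
	      fallthru

   NEW_BB is the returned, conditionally executed block.  */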
8295basic_block
8296insert_cond_bb (basic_block bb, gimple stmt, gimple cond)
8297{
8298  edge fall = split_block (bb, stmt);
8299  gimple_stmt_iterator iter = gsi_last_bb (bb);
8300  basic_block new_bb;
8301
8302  /* Insert cond statement.  */
8303  gcc_assert (gimple_code (cond) == GIMPLE_COND);
8304  if (gsi_end_p (iter))
8305    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8306  else
8307    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8308
8309  /* Create conditionally executed block.  */
8310  new_bb = create_empty_bb (bb);
8311  make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8312  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8313
8314  /* Fix edge for split bb.  */
8315  fall->flags = EDGE_FALSE_VALUE;
8316
8317  /* Update dominance info.  */
8318  if (dom_info_available_p (CDI_DOMINATORS))
8319    {
8320      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8321      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8322    }
8323
8324  /* Update loop info.  */
8325  if (current_loops)
8326    add_bb_to_loop (new_bb, bb->loop_father);
8327
8328  return new_bb;
8329}
8330
8331/* Build a ternary operation and gimplify it.  Emit code before GSI.
8332   Return the gimple_val holding the result.  */
8333
8334tree
8335gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8336		 tree type, tree a, tree b, tree c)
8337{
8338  tree ret;
8339  location_t loc = gimple_location (gsi_stmt (*gsi));
8340
8341  ret = fold_build3_loc (loc, code, type, a, b, c);
8342  STRIP_NOPS (ret);
8343
8344  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8345                                   GSI_SAME_STMT);
8346}
8347
8348/* Build a binary operation and gimplify it.  Emit code before GSI.
8349   Return the gimple_val holding the result.  */
8350
8351tree
8352gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8353		 tree type, tree a, tree b)
8354{
8355  tree ret;
8356
8357  ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8358  STRIP_NOPS (ret);
8359
8360  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8361                                   GSI_SAME_STMT);
8362}
8363
8364/* Build a unary operation and gimplify it.  Emit code before GSI.
8365   Return the gimple_val holding the result.  */
8366
8367tree
8368gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8369		 tree a)
8370{
8371  tree ret;
8372
8373  ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8374  STRIP_NOPS (ret);
8375
8376  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8377                                   GSI_SAME_STMT);
8378}
8379
8380
8381
8382/* Given a basic block B which ends with a conditional and has
8383   precisely two successors, determine which of the edges is taken if
8384   the conditional is true and which is taken if the conditional is
8385   false.  Set TRUE_EDGE and FALSE_EDGE appropriately.  */
8386
8387void
8388extract_true_false_edges_from_block (basic_block b,
8389				     edge *true_edge,
8390				     edge *false_edge)
8391{
8392  edge e = EDGE_SUCC (b, 0);
8393
8394  if (e->flags & EDGE_TRUE_VALUE)
8395    {
8396      *true_edge = e;
8397      *false_edge = EDGE_SUCC (b, 1);
8398    }
8399  else
8400    {
8401      *false_edge = e;
8402      *true_edge = EDGE_SUCC (b, 1);
8403    }
8404}
8405
8406/* Emit return warnings.  */
8407
8408namespace {
8409
8410const pass_data pass_data_warn_function_return =
8411{
8412  GIMPLE_PASS, /* type */
8413  "*warn_function_return", /* name */
8414  OPTGROUP_NONE, /* optinfo_flags */
8415  TV_NONE, /* tv_id */
8416  PROP_cfg, /* properties_required */
8417  0, /* properties_provided */
8418  0, /* properties_destroyed */
8419  0, /* todo_flags_start */
8420  0, /* todo_flags_finish */
8421};
8422
8423class pass_warn_function_return : public gimple_opt_pass
8424{
8425public:
8426  pass_warn_function_return (gcc::context *ctxt)
8427    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8428  {}
8429
8430  /* opt_pass methods: */
8431  virtual unsigned int execute (function *);
8432
8433}; // class pass_warn_function_return
8434
8435unsigned int
8436pass_warn_function_return::execute (function *fun)
8437{
8438  source_location location;
8439  gimple last;
8440  edge e;
8441  edge_iterator ei;
8442
8443  if (!targetm.warn_func_return (fun->decl))
8444    return 0;
8445
8446  /* If we have a path to EXIT, then we do return.  */
8447  if (TREE_THIS_VOLATILE (fun->decl)
8448      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8449    {
8450      location = UNKNOWN_LOCATION;
8451      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8452	{
8453	  last = last_stmt (e->src);
8454	  if ((gimple_code (last) == GIMPLE_RETURN
8455	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8456	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8457	    break;
8458	}
8459      if (location == UNKNOWN_LOCATION)
8460	location = cfun->function_end_locus;
8461      warning_at (location, 0, "%<noreturn%> function does return");
8462    }
8463
8464  /* If we see "return;" in some basic block, then we do reach the end
8465     without returning a value.  */
8466  else if (warn_return_type
8467	   && !TREE_NO_WARNING (fun->decl)
8468	   && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8469	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8470    {
8471      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8472	{
8473	  gimple last = last_stmt (e->src);
8474	  greturn *return_stmt = dyn_cast <greturn *> (last);
8475	  if (return_stmt
8476	      && gimple_return_retval (return_stmt) == NULL
8477	      && !gimple_no_warning_p (last))
8478	    {
8479	      location = gimple_location (last);
8480	      if (location == UNKNOWN_LOCATION)
8481		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
8483	      TREE_NO_WARNING (fun->decl) = 1;
8484	      break;
8485	    }
8486	}
8487    }
8488  return 0;
8489}
8490
8491} // anon namespace
8492
8493gimple_opt_pass *
8494make_pass_warn_function_return (gcc::context *ctxt)
8495{
8496  return new pass_warn_function_return (ctxt);
8497}
8498
/* Walk a gimplified function and warn about calls whose return value is
   ignored and whose callee is declared with attribute((warn_unused_result)).
   This is done before inlining, so we don't have to worry about that.  */
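
/* For example (illustrative source, not from this file):

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }   <-- warns: ignoring return value of 'f'  */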
8502
8503static void
8504do_warn_unused_result (gimple_seq seq)
8505{
8506  tree fdecl, ftype;
8507  gimple_stmt_iterator i;
8508
8509  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8510    {
8511      gimple g = gsi_stmt (i);
8512
8513      switch (gimple_code (g))
8514	{
8515	case GIMPLE_BIND:
8516	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8517	  break;
8518	case GIMPLE_TRY:
8519	  do_warn_unused_result (gimple_try_eval (g));
8520	  do_warn_unused_result (gimple_try_cleanup (g));
8521	  break;
8522	case GIMPLE_CATCH:
8523	  do_warn_unused_result (gimple_catch_handler (
8524				   as_a <gcatch *> (g)));
8525	  break;
8526	case GIMPLE_EH_FILTER:
8527	  do_warn_unused_result (gimple_eh_filter_failure (g));
8528	  break;
8529
8530	case GIMPLE_CALL:
8531	  if (gimple_call_lhs (g))
8532	    break;
8533	  if (gimple_call_internal_p (g))
8534	    break;
8535
8536	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
8537	     LHS.  All calls whose value is ignored should be
8538	     represented like this.  Look for the attribute.  */
8539	  fdecl = gimple_call_fndecl (g);
8540	  ftype = gimple_call_fntype (g);
8541
8542	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8543	    {
8544	      location_t loc = gimple_location (g);
8545
8546	      if (fdecl)
8547		warning_at (loc, OPT_Wunused_result,
8548			    "ignoring return value of %qD, "
8549			    "declared with attribute warn_unused_result",
8550			    fdecl);
8551	      else
8552		warning_at (loc, OPT_Wunused_result,
8553			    "ignoring return value of function "
8554			    "declared with attribute warn_unused_result");
8555	    }
8556	  break;
8557
8558	default:
8559	  /* Not a container, not a call, or a call whose value is used.  */
8560	  break;
8561	}
8562    }
8563}
8564
8565namespace {
8566
8567const pass_data pass_data_warn_unused_result =
8568{
8569  GIMPLE_PASS, /* type */
8570  "*warn_unused_result", /* name */
8571  OPTGROUP_NONE, /* optinfo_flags */
8572  TV_NONE, /* tv_id */
8573  PROP_gimple_any, /* properties_required */
8574  0, /* properties_provided */
8575  0, /* properties_destroyed */
8576  0, /* todo_flags_start */
8577  0, /* todo_flags_finish */
8578};
8579
8580class pass_warn_unused_result : public gimple_opt_pass
8581{
8582public:
8583  pass_warn_unused_result (gcc::context *ctxt)
8584    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8585  {}
8586
8587  /* opt_pass methods: */
8588  virtual bool gate (function *) { return flag_warn_unused_result; }
8589  virtual unsigned int execute (function *)
8590    {
8591      do_warn_unused_result (gimple_body (current_function_decl));
8592      return 0;
8593    }
8594
8595}; // class pass_warn_unused_result
8596
8597} // anon namespace
8598
8599gimple_opt_pass *
8600make_pass_warn_unused_result (gcc::context *ctxt)
8601{
8602  return new pass_warn_unused_result (ctxt);
8603}
8604
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions
   nothrow, pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if
   necessary.

   This pass can't be executed as a standalone pass from the pass
   manager, because in between inlining and this fixup,
   verify_flow_info would fail.  */
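
/* For instance (illustrative, not from this file), if IPA analysis
   has since proved the callee of

     x_1 = foo (y_2);

   to be const, any EH or abnormal edges out of the call's block are
   now dead; they are purged below and the CFG is scheduled for
   cleanup.  */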
8612
8613unsigned int
8614execute_fixup_cfg (void)
8615{
8616  basic_block bb;
8617  gimple_stmt_iterator gsi;
8618  int todo = 0;
8619  gcov_type count_scale;
8620  edge e;
8621  edge_iterator ei;
8622
8623  count_scale
8624      = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8625			    ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8626
8627  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8628			    cgraph_node::get (current_function_decl)->count;
8629  EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8630			    apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8631                                       count_scale);
8632
8633  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8634    e->count = apply_scale (e->count, count_scale);
8635
8636  FOR_EACH_BB_FN (bb, cfun)
8637    {
8638      bb->count = apply_scale (bb->count, count_scale);
8639      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8640	{
8641	  gimple stmt = gsi_stmt (gsi);
8642	  tree decl = is_gimple_call (stmt)
8643		      ? gimple_call_fndecl (stmt)
8644		      : NULL;
8645	  if (decl)
8646	    {
8647	      int flags = gimple_call_flags (stmt);
8648	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8649		{
8650		  if (gimple_purge_dead_abnormal_call_edges (bb))
8651		    todo |= TODO_cleanup_cfg;
8652
8653		  if (gimple_in_ssa_p (cfun))
8654		    {
8655		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
8656		      update_stmt (stmt);
8657		    }
8658		}
8659
8660	      if (flags & ECF_NORETURN
8661		  && fixup_noreturn_call (stmt))
8662		todo |= TODO_cleanup_cfg;
8663	     }
8664
	  /* Remove stores to variables we marked write-only.
	     Keep the access when the store has a side effect, i.e. when
	     the source is volatile.  */
8668	  if (gimple_store_p (stmt)
8669	      && !gimple_has_side_effects (stmt))
8670	    {
8671	      tree lhs = get_base_address (gimple_get_lhs (stmt));
8672
8673	      if (TREE_CODE (lhs) == VAR_DECL
8674		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8675		  && varpool_node::get (lhs)->writeonly)
8676		{
8677		  unlink_stmt_vdef (stmt);
8678		  gsi_remove (&gsi, true);
8679		  release_defs (stmt);
8680	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
8681	          continue;
8682		}
8683	    }
	  /* For calls we can simply remove the LHS when it is known
	     to be write-only.  */
8686	  if (is_gimple_call (stmt)
8687	      && gimple_get_lhs (stmt))
8688	    {
8689	      tree lhs = get_base_address (gimple_get_lhs (stmt));
8690
8691	      if (TREE_CODE (lhs) == VAR_DECL
8692		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8693		  && varpool_node::get (lhs)->writeonly)
8694		{
8695		  gimple_call_set_lhs (stmt, NULL);
8696		  update_stmt (stmt);
8697	          todo |= TODO_update_ssa | TODO_cleanup_cfg;
8698		}
8699	    }
8700
8701	  if (maybe_clean_eh_stmt (stmt)
8702	      && gimple_purge_dead_eh_edges (bb))
8703	    todo |= TODO_cleanup_cfg;
8704	  gsi_next (&gsi);
8705	}
8706
8707      FOR_EACH_EDGE (e, ei, bb->succs)
8708        e->count = apply_scale (e->count, count_scale);
8709
      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call, end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
8714      if (EDGE_COUNT (bb->succs) == 0)
8715	{
8716	  gimple stmt = last_stmt (bb);
8717	  if (!stmt
8718	      || (!is_ctrl_stmt (stmt)
8719		  && (!is_gimple_call (stmt)
8720		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8721	    {
8722	      if (stmt && is_gimple_call (stmt))
8723		gimple_call_set_ctrl_altering (stmt, false);
8724	      stmt = gimple_build_call
8725		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8726	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
8727	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8728	    }
8729	}
8730    }
8731  if (count_scale != REG_BR_PROB_BASE)
8732    compute_function_frequency ();
8733
8734  if (current_loops
8735      && (todo & TODO_cleanup_cfg))
8736    loops_state_set (LOOPS_NEED_FIXUP);
8737
8738  return todo;
8739}
8740
8741namespace {
8742
8743const pass_data pass_data_fixup_cfg =
8744{
8745  GIMPLE_PASS, /* type */
8746  "fixup_cfg", /* name */
8747  OPTGROUP_NONE, /* optinfo_flags */
8748  TV_NONE, /* tv_id */
8749  PROP_cfg, /* properties_required */
8750  0, /* properties_provided */
8751  0, /* properties_destroyed */
8752  0, /* todo_flags_start */
8753  0, /* todo_flags_finish */
8754};
8755
8756class pass_fixup_cfg : public gimple_opt_pass
8757{
8758public:
8759  pass_fixup_cfg (gcc::context *ctxt)
8760    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8761  {}
8762
8763  /* opt_pass methods: */
8764  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8765  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8766
8767}; // class pass_fixup_cfg
8768
8769} // anon namespace
8770
8771gimple_opt_pass *
8772make_pass_fixup_cfg (gcc::context *ctxt)
8773{
8774  return new pass_fixup_cfg (ctxt);
8775}
8776
8777/* Garbage collection support for edge_def.  */
8778
8779extern void gt_ggc_mx (tree&);
8780extern void gt_ggc_mx (gimple&);
8781extern void gt_ggc_mx (rtx&);
8782extern void gt_ggc_mx (basic_block&);
8783
8784static void
8785gt_ggc_mx (rtx_insn *& x)
8786{
8787  if (x)
8788    gt_ggc_mx_rtx_def ((void *) x);
8789}
8790
8791void
8792gt_ggc_mx (edge_def *e)
8793{
8794  tree block = LOCATION_BLOCK (e->goto_locus);
8795  gt_ggc_mx (e->src);
8796  gt_ggc_mx (e->dest);
8797  if (current_ir_type () == IR_GIMPLE)
8798    gt_ggc_mx (e->insns.g);
8799  else
8800    gt_ggc_mx (e->insns.r);
8801  gt_ggc_mx (block);
8802}
8803
8804/* PCH support for edge_def.  */
8805
8806extern void gt_pch_nx (tree&);
8807extern void gt_pch_nx (gimple&);
8808extern void gt_pch_nx (rtx&);
8809extern void gt_pch_nx (basic_block&);
8810
8811static void
8812gt_pch_nx (rtx_insn *& x)
8813{
8814  if (x)
8815    gt_pch_nx_rtx_def ((void *) x);
8816}
8817
8818void
8819gt_pch_nx (edge_def *e)
8820{
8821  tree block = LOCATION_BLOCK (e->goto_locus);
8822  gt_pch_nx (e->src);
8823  gt_pch_nx (e->dest);
8824  if (current_ir_type () == IR_GIMPLE)
8825    gt_pch_nx (e->insns.g);
8826  else
8827    gt_pch_nx (e->insns.r);
8828  gt_pch_nx (block);
8829}
8830
8831void
8832gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8833{
8834  tree block = LOCATION_BLOCK (e->goto_locus);
8835  op (&(e->src), cookie);
8836  op (&(e->dest), cookie);
8837  if (current_ir_type () == IR_GIMPLE)
8838    op (&(e->insns.g), cookie);
8839  else
8840    op (&(e->insns.r), cookie);
8841  op (&(block), cookie);
8842}
8843