1/* Miscellaneous SSA utility functions.
2   Copyright (C) 2001-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 3, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24#include "hash-set.h"
25#include "machmode.h"
26#include "vec.h"
27#include "double-int.h"
28#include "input.h"
29#include "alias.h"
30#include "symtab.h"
31#include "wide-int.h"
32#include "inchash.h"
33#include "tree.h"
34#include "fold-const.h"
35#include "stor-layout.h"
36#include "flags.h"
37#include "tm_p.h"
38#include "target.h"
39#include "langhooks.h"
40#include "predict.h"
41#include "hard-reg-set.h"
42#include "input.h"
43#include "function.h"
44#include "dominance.h"
45#include "cfg.h"
46#include "basic-block.h"
47#include "gimple-pretty-print.h"
48#include "tree-ssa-alias.h"
49#include "internal-fn.h"
50#include "gimple-fold.h"
51#include "gimple-expr.h"
52#include "is-a.h"
53#include "gimple.h"
54#include "gimplify.h"
55#include "gimple-iterator.h"
56#include "gimple-walk.h"
57#include "gimple-ssa.h"
58#include "tree-phinodes.h"
59#include "ssa-iterators.h"
60#include "stringpool.h"
61#include "tree-ssanames.h"
62#include "tree-ssa-loop-manip.h"
63#include "tree-into-ssa.h"
64#include "tree-ssa.h"
65#include "tree-inline.h"
66#include "hash-map.h"
67#include "tree-pass.h"
68#include "diagnostic-core.h"
69#include "cfgloop.h"
70#include "cfgexpand.h"
71
/* Hash map of variable mappings, keyed by edge.  */
73static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
74
75
76/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
77
78void
79redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
80{
81  edge_var_map new_node;
82
83  if (edge_var_maps == NULL)
84    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
85
86  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
87  new_node.def = def;
88  new_node.result = result;
89  new_node.locus = locus;
90
91  slot.safe_push (new_node);
92}
93
94
95/* Clear the var mappings in edge E.  */
96
97void
98redirect_edge_var_map_clear (edge e)
99{
100  if (!edge_var_maps)
101    return;
102
103  auto_vec<edge_var_map> *head = edge_var_maps->get (e);
104
105  if (head)
106    head->release ();
107}
108
109
/* Duplicate the redirected var mappings in OLDE in NEWE.

   The mappings recorded for OLDE are copied rather than moved, so OLDE
   keeps its mappings and both edges end up with their own copy of the
   same entries.  */
115
116void
117redirect_edge_var_map_dup (edge newe, edge olde)
118{
119  if (!edge_var_maps)
120    return;
121
122  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
123  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
124  if (!old_head)
125    return;
126
127  new_head->safe_splice (*old_head);
128}
129
130
131/* Return the variable mappings for a given edge.  If there is none, return
132   NULL.  */
133
134vec<edge_var_map> *
135redirect_edge_var_map_vector (edge e)
136{
137  /* Hey, what kind of idiot would... you'd be surprised.  */
138  if (!edge_var_maps)
139    return NULL;
140
141  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
142  if (!slot)
143    return NULL;
144
145  return slot;
146}
147
148/* Clear the edge variable mappings.  */
149
150void
151redirect_edge_var_map_destroy (void)
152{
153  delete edge_var_maps;
154  edge_var_maps = NULL;
155}
156
157
158/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect E to DEST.  Return the redirected edge.
160   The list of removed arguments is stored in a vector accessed
161   through edge_var_maps.  */
162
163edge
164ssa_redirect_edge (edge e, basic_block dest)
165{
166  gphi_iterator gsi;
167  gphi *phi;
168
169  redirect_edge_var_map_clear (e);
170
171  /* Remove the appropriate PHI arguments in E's destination block.  */
172  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
173    {
174      tree def;
      source_location locus;
176
177      phi = gsi.phi ();
178      def = gimple_phi_arg_def (phi, e->dest_idx);
179      locus = gimple_phi_arg_location (phi, e->dest_idx);
180
181      if (def == NULL_TREE)
182	continue;
183
184      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
185    }
186
187  e = redirect_edge_succ_nodup (e, dest);
188
189  return e;
190}
191
192
193/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
194   E->dest.  */
195
196void
197flush_pending_stmts (edge e)
198{
199  gphi *phi;
200  edge_var_map *vm;
201  int i;
202  gphi_iterator gsi;
203
204  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
205  if (!v)
206    return;
207
208  for (gsi = gsi_start_phis (e->dest), i = 0;
209       !gsi_end_p (gsi) && v->iterate (i, &vm);
210       gsi_next (&gsi), i++)
211    {
212      tree def;
213
214      phi = gsi.phi ();
215      def = redirect_edge_var_map_def (vm);
216      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
217    }
218
219  redirect_edge_var_map_clear (e);
220}
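
/* For illustration only (a hypothetical caller, not part of this file):
   redirecting edge E to NEW_DEST while preserving the PHI arguments it
   carried typically pairs the two routines above,

       e = ssa_redirect_edge (e, new_dest);
       ... create or adjust PHI nodes in new_dest as needed ...
       flush_pending_stmts (e);

   ssa_redirect_edge saves E's PHI arguments into edge_var_maps, and
   flush_pending_stmts re-adds them, in order, to the PHI nodes found in
   the new E->dest before clearing the saved mappings.  */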
221
222/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
223   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
224   expression with a different value.
225
226   This will update any annotations (say debug bind stmts) referring
227   to the original LHS, so that they use the RHS instead.  This is
228   done even if NLHS and LHS are the same, for it is understood that
229   the RHS will be modified afterwards, and NLHS will not be assigned
230   an equivalent value.
231
232   Adjusting any non-annotation uses of the LHS, if needed, is a
233   responsibility of the caller.
234
235   The effect of this call should be pretty much the same as that of
236   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
238   annotations, but using this function saves all the inserting,
239   copying and removing.  */
240
241void
242gimple_replace_ssa_lhs (gimple stmt, tree nlhs)
243{
244  if (MAY_HAVE_DEBUG_STMTS)
245    {
246      tree lhs = gimple_get_lhs (stmt);
247
248      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
249
250      insert_debug_temp_for_var_def (NULL, lhs);
251    }
252
253  gimple_set_lhs (stmt, nlhs);
254}
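
/* As a sketch of the effect (hypothetical IL, not from a testcase): given

     x_3 = a_1 + b_2;
     # DEBUG i => x_3

   calling gimple_replace_ssa_lhs (stmt, x_4) before rewriting the RHS
   turns the debug bind into

     # DEBUG i => a_1 + b_2

   or, if x_3 had several debug uses,

     # DEBUG D#1 => a_1 + b_2
     # DEBUG i => D#1

   so that no debug bind keeps referring to the value the old LHS is
   about to lose.  */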
255
256
257/* Given a tree for an expression for which we might want to emit
258   locations or values in debug information (generally a variable, but
259   we might deal with other kinds of trees in the future), return the
260   tree that should be used as the variable of a DEBUG_BIND STMT or
261   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
262
263tree
264target_for_debug_bind (tree var)
265{
266  if (!MAY_HAVE_DEBUG_STMTS)
267    return NULL_TREE;
268
269  if (TREE_CODE (var) == SSA_NAME)
270    {
271      var = SSA_NAME_VAR (var);
272      if (var == NULL_TREE)
273	return NULL_TREE;
274    }
275
276  if ((TREE_CODE (var) != VAR_DECL
277       || VAR_DECL_IS_VIRTUAL_OPERAND (var))
278      && TREE_CODE (var) != PARM_DECL)
279    return NULL_TREE;
280
281  if (DECL_HAS_VALUE_EXPR_P (var))
282    return target_for_debug_bind (DECL_VALUE_EXPR (var));
283
284  if (DECL_IGNORED_P (var))
285    return NULL_TREE;
286
287  /* var-tracking only tracks registers.  */
288  if (!is_gimple_reg_type (TREE_TYPE (var)))
289    return NULL_TREE;
290
291  return var;
292}
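
/* Purely illustrative examples of the above: for an SSA name i_3 with
   SSA_NAME_VAR i, the decl i is returned; for a decl with a
   DECL_VALUE_EXPR (say a variable lowered to a field of a frame object),
   the result of recursing on that value expression is returned; and
   NULL_TREE is returned for ignored decls and for variables of
   non-register (aggregate) type, which var-tracking does not track.  */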
293
294/* Called via walk_tree, look for SSA_NAMEs that have already been
295   released.  */
296
297static tree
298find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
299{
300  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
301
302  if (wi && wi->is_lhs)
303    return NULL_TREE;
304
305  if (TREE_CODE (*tp) == SSA_NAME)
306    {
307      if (SSA_NAME_IN_FREE_LIST (*tp))
308	return *tp;
309
310      *walk_subtrees = 0;
311    }
312  else if (IS_TYPE_OR_DECL_P (*tp))
313    *walk_subtrees = 0;
314
315  return NULL_TREE;
316}
317
318/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
319   by other DEBUG stmts, and replace uses of the DEF with the
320   newly-created debug temp.  */
321
322void
323insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
324{
325  imm_use_iterator imm_iter;
326  use_operand_p use_p;
327  gimple stmt;
328  gimple def_stmt = NULL;
329  int usecount = 0;
330  tree value = NULL;
331
332  if (!MAY_HAVE_DEBUG_STMTS)
333    return;
334
335  /* If this name has already been registered for replacement, do nothing
336     as anything that uses this name isn't in SSA form.  */
337  if (name_registered_for_update_p (var))
338    return;
339
340  /* Check whether there are debug stmts that reference this variable and,
341     if there are, decide whether we should use a debug temp.  */
342  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
343    {
344      stmt = USE_STMT (use_p);
345
346      if (!gimple_debug_bind_p (stmt))
347	continue;
348
349      if (usecount++)
350	break;
351
352      if (gimple_debug_bind_get_value (stmt) != var)
353	{
354	  /* Count this as an additional use, so as to make sure we
355	     use a temp unless VAR's definition has a SINGLE_RHS that
356	     can be shared.  */
357	  usecount++;
358	  break;
359	}
360    }
361
362  if (!usecount)
363    return;
364
365  if (gsi)
366    def_stmt = gsi_stmt (*gsi);
367  else
368    def_stmt = SSA_NAME_DEF_STMT (var);
369
370  /* If we didn't get an insertion point, and the stmt has already
371     been removed, we won't be able to insert the debug bind stmt, so
372     we'll have to drop debug information.  */
373  if (gimple_code (def_stmt) == GIMPLE_PHI)
374    {
375      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
376      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
377	value = NULL;
378      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
379	 to.  */
380      else if (value == error_mark_node)
381	value = NULL;
382    }
383  else if (is_gimple_assign (def_stmt))
384    {
385      bool no_value = false;
386
387      if (!dom_info_available_p (CDI_DOMINATORS))
388	{
389	  struct walk_stmt_info wi;
390
391	  memset (&wi, 0, sizeof (wi));
392
393	  /* When removing blocks without following reverse dominance
394	     order, we may sometimes encounter SSA_NAMEs that have
395	     already been released, referenced in other SSA_DEFs that
396	     we're about to release.  Consider:
397
398	     <bb X>:
399	     v_1 = foo;
400
401	     <bb Y>:
402	     w_2 = v_1 + bar;
403	     # DEBUG w => w_2
404
405	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover the
407	     original definition of v_1: when it was deleted, it was
408	     only referenced in other DEFs, it couldn't possibly know
409	     it should have been retained, and propagating every
410	     single DEF just in case it might have to be propagated
411	     into a DEBUG STMT would probably be too wasteful.
412
413	     When dominator information is not readily available, we
414	     check for and accept some loss of debug information.  But
415	     if it is available, there's no excuse for us to remove
416	     blocks in the wrong order, so we don't even check for
417	     dead SSA NAMEs.  SSA verification shall catch any
418	     errors.  */
419	  if ((!gsi && !gimple_bb (def_stmt))
420	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
421	    no_value = true;
422	}
423
424      if (!no_value)
425	value = gimple_assign_rhs_to_tree (def_stmt);
426    }
427
428  if (value)
429    {
430      /* If there's a single use of VAR, and VAR is the entire debug
431	 expression (usecount would have been incremented again
432	 otherwise), and the definition involves only constants and
433	 SSA names, then we can propagate VALUE into this single use,
434	 avoiding the temp.
435
436	 We can also avoid using a temp if VALUE can be shared and
437	 propagated into all uses, without generating expressions that
438	 wouldn't be valid gimple RHSs.
439
440	 Other cases that would require unsharing or non-gimple RHSs
441	 are deferred to a debug temp, although we could avoid temps
442	 at the expense of duplication of expressions.  */
443
444      if (CONSTANT_CLASS_P (value)
445	  || gimple_code (def_stmt) == GIMPLE_PHI
446	  || (usecount == 1
447	      && (!gimple_assign_single_p (def_stmt)
448		  || is_gimple_min_invariant (value)))
449	  || is_gimple_reg (value))
450	;
451      else
452	{
453	  gdebug *def_temp;
454	  tree vexpr = make_node (DEBUG_EXPR_DECL);
455
456	  def_temp = gimple_build_debug_bind (vexpr,
457					      unshare_expr (value),
458					      def_stmt);
459
460	  DECL_ARTIFICIAL (vexpr) = 1;
461	  TREE_TYPE (vexpr) = TREE_TYPE (value);
462	  if (DECL_P (value))
463	    DECL_MODE (vexpr) = DECL_MODE (value);
464	  else
465	    DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
466
467	  if (gsi)
468	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
469	  else
470	    {
471	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
472	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
473	    }
474
475	  value = vexpr;
476	}
477    }
478
479  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
480    {
481      if (!gimple_debug_bind_p (stmt))
482	continue;
483
484      if (value)
485	{
486	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* VALUE is unshared for each use: it is either a
	       SINGLE_RHS, some other RHS that may end up replacing
	       several debug uses, or a DEBUG_EXPR_DECL, for which
	       unshare_expr simply returns the decl itself.  */
492	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl, fold the
494	     resulting expression.  Otherwise we end up with invalid IL.  */
495	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
496	    {
497	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
498	      fold_stmt_inplace (&gsi);
499	    }
500	}
501      else
502	gimple_debug_bind_reset_value (stmt);
503
504      update_stmt (stmt);
505    }
506}
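
/* To illustrate (hypothetical IL): when the definition

     x_3 = a_1 + b_2;

   is about to be removed and x_3 appears in more than one debug bind,
   the function above introduces a debug temporary and rewrites the
   binds,

     # DEBUG D#1 => a_1 + b_2
     # DEBUG i => D#1
     # DEBUG j => D#1

   whereas with a single debug use the value a_1 + b_2 is propagated
   into the bind directly, and when no value can be recovered the binds
   are simply reset.  */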
507
508
509/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
510   other DEBUG stmts, and replace uses of the DEF with the
511   newly-created debug temp.  */
512
513void
514insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
515{
516  gimple stmt;
517  ssa_op_iter op_iter;
518  def_operand_p def_p;
519
520  if (!MAY_HAVE_DEBUG_STMTS)
521    return;
522
523  stmt = gsi_stmt (*gsi);
524
525  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
526    {
527      tree var = DEF_FROM_PTR (def_p);
528
529      if (TREE_CODE (var) != SSA_NAME)
530	continue;
531
532      insert_debug_temp_for_var_def (gsi, var);
533    }
534}
535
536/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
537
538void
539reset_debug_uses (gimple stmt)
540{
541  ssa_op_iter op_iter;
542  def_operand_p def_p;
543  imm_use_iterator imm_iter;
544  gimple use_stmt;
545
546  if (!MAY_HAVE_DEBUG_STMTS)
547    return;
548
549  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
550    {
551      tree var = DEF_FROM_PTR (def_p);
552
553      if (TREE_CODE (var) != SSA_NAME)
554	continue;
555
556      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
557	{
558	  if (!gimple_debug_bind_p (use_stmt))
559	    continue;
560
561	  gimple_debug_bind_reset_value (use_stmt);
562	  update_stmt (use_stmt);
563	}
564    }
565}
566
567/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
568   dominated stmts before their dominators, so that release_ssa_defs
569   stands a chance of propagating DEFs into debug bind stmts.  */
570
571void
572release_defs_bitset (bitmap toremove)
573{
574  unsigned j;
575  bitmap_iterator bi;
576
  /* Performing a topological sort is probably overkill; this will
578     most likely run in slightly superlinear time, rather than the
579     pathological quadratic worst case.  */
580  while (!bitmap_empty_p (toremove))
581    EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
582      {
583	bool remove_now = true;
584	tree var = ssa_name (j);
585	gimple stmt;
586	imm_use_iterator uit;
587
588	FOR_EACH_IMM_USE_STMT (stmt, uit, var)
589	  {
590	    ssa_op_iter dit;
591	    def_operand_p def_p;
592
593	    /* We can't propagate PHI nodes into debug stmts.  */
594	    if (gimple_code (stmt) == GIMPLE_PHI
595		|| is_gimple_debug (stmt))
596	      continue;
597
598	    /* If we find another definition to remove that uses
599	       the one we're looking at, defer the removal of this
600	       one, so that it can be propagated into debug stmts
601	       after the other is.  */
602	    FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
603	      {
604		tree odef = DEF_FROM_PTR (def_p);
605
606		if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
607		  {
608		    remove_now = false;
609		    break;
610		  }
611	      }
612
613	    if (!remove_now)
614	      BREAK_FROM_IMM_USE_STMT (uit);
615	  }
616
617	if (remove_now)
618	  {
619	    gimple def = SSA_NAME_DEF_STMT (var);
620	    gimple_stmt_iterator gsi = gsi_for_stmt (def);
621
622	    if (gimple_code (def) == GIMPLE_PHI)
623	      remove_phi_node (&gsi, true);
624	    else
625	      {
626		gsi_remove (&gsi, true);
627		release_defs (def);
628	      }
629
630	    bitmap_clear_bit (toremove, j);
631	  }
632      }
633}
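
/* For example (hypothetical IL), if TOREMOVE contains both v_1 and w_2
   from

     v_1 = a_5 * 2;
     w_2 = v_1 + 1;
     # DEBUG w => w_2

   the loop above defers releasing v_1 until w_2's definition has been
   removed, so the debug bind can first be rewritten in terms of v_1
   (and ultimately of a_5) instead of being reset for lack of a
   definition.  */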
634
635/* Return true if SSA_NAME is malformed and mark it visited.
636
637   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
638      operand.  */
639
640static bool
641verify_ssa_name (tree ssa_name, bool is_virtual)
642{
643  if (TREE_CODE (ssa_name) != SSA_NAME)
644    {
645      error ("expected an SSA_NAME object");
646      return true;
647    }
648
649  if (SSA_NAME_IN_FREE_LIST (ssa_name))
650    {
651      error ("found an SSA_NAME that had been released into the free pool");
652      return true;
653    }
654
655  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
656      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
657    {
658      error ("type mismatch between an SSA_NAME and its symbol");
659      return true;
660    }
661
662  if (is_virtual && !virtual_operand_p (ssa_name))
663    {
664      error ("found a virtual definition for a GIMPLE register");
665      return true;
666    }
667
668  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
669    {
670      error ("virtual SSA name for non-VOP decl");
671      return true;
672    }
673
674  if (!is_virtual && virtual_operand_p (ssa_name))
675    {
676      error ("found a real definition for a non-register");
677      return true;
678    }
679
680  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
681      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
682    {
683      error ("found a default name with a non-empty defining statement");
684      return true;
685    }
686
687  return false;
688}
689
690
691/* Return true if the definition of SSA_NAME at block BB is malformed.
692
693   STMT is the statement where SSA_NAME is created.
694
695   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
696      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
697      it means that the block in that array slot contains the
698      definition of SSA_NAME.
699
700   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */
701
702static bool
703verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
704	    gimple stmt, bool is_virtual)
705{
706  if (verify_ssa_name (ssa_name, is_virtual))
707    goto err;
708
709  if (SSA_NAME_VAR (ssa_name)
710      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
711      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
712    {
713      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
714      goto err;
715    }
716
717  if (definition_block[SSA_NAME_VERSION (ssa_name)])
718    {
719      error ("SSA_NAME created in two different blocks %i and %i",
720	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
721      goto err;
722    }
723
724  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
725
726  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
727    {
728      error ("SSA_NAME_DEF_STMT is wrong");
729      fprintf (stderr, "Expected definition statement:\n");
730      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
731      fprintf (stderr, "\nActual definition statement:\n");
732      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
733      goto err;
734    }
735
736  return false;
737
738err:
739  fprintf (stderr, "while verifying SSA_NAME ");
740  print_generic_expr (stderr, ssa_name, 0);
741  fprintf (stderr, " in statement\n");
742  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
743
744  return true;
745}
746
747
748/* Return true if the use of SSA_NAME at statement STMT in block BB is
749   malformed.
750
751   DEF_BB is the block where SSA_NAME was found to be created.
752
753   IDOM contains immediate dominator information for the flowgraph.
754
755   CHECK_ABNORMAL is true if the caller wants to check whether this use
756      is flowing through an abnormal edge (only used when checking PHI
757      arguments).
758
759   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
760     that are defined before STMT in basic block BB.  */
761
762static bool
763verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
764	    gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
765{
766  bool err = false;
767  tree ssa_name = USE_FROM_PTR (use_p);
768
769  if (!TREE_VISITED (ssa_name))
770    if (verify_imm_links (stderr, ssa_name))
771      err = true;
772
773  TREE_VISITED (ssa_name) = 1;
774
775  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
776      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
777    ; /* Default definitions have empty statements.  Nothing to do.  */
778  else if (!def_bb)
779    {
780      error ("missing definition");
781      err = true;
782    }
783  else if (bb != def_bb
784	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
785    {
786      error ("definition in block %i does not dominate use in block %i",
787	     def_bb->index, bb->index);
788      err = true;
789    }
790  else if (bb == def_bb
791	   && names_defined_in_bb != NULL
792	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
793    {
794      error ("definition in block %i follows the use", def_bb->index);
795      err = true;
796    }
797
798  if (check_abnormal
799      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
800    {
801      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
802      err = true;
803    }
804
  /* Make sure the use is on the correct immediate-use list by checking
     that the previous element refers to the same SSA name.  */
807  if (use_p->prev == NULL)
808    {
809      error ("no immediate_use list");
810      err = true;
811    }
812  else
813    {
814      tree listvar;
815      if (use_p->prev->use == NULL)
816	listvar = use_p->prev->loc.ssa_name;
817      else
818	listvar = USE_FROM_PTR (use_p->prev);
819      if (listvar != ssa_name)
820        {
821	  error ("wrong immediate use list");
822	  err = true;
823	}
824    }
825
826  if (err)
827    {
828      fprintf (stderr, "for SSA_NAME: ");
829      print_generic_expr (stderr, ssa_name, TDF_VOPS);
830      fprintf (stderr, " in statement:\n");
831      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
832    }
833
834  return err;
835}
836
837
838/* Return true if any of the arguments for PHI node PHI at block BB is
839   malformed.
840
841   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
842      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
843      it means that the block in that array slot contains the
844      definition of SSA_NAME.  */
845
846static bool
847verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
848{
849  edge e;
850  bool err = false;
851  size_t i, phi_num_args = gimple_phi_num_args (phi);
852
853  if (EDGE_COUNT (bb->preds) != phi_num_args)
854    {
855      error ("incoming edge count does not match number of PHI arguments");
856      err = true;
857      goto error;
858    }
859
860  for (i = 0; i < phi_num_args; i++)
861    {
862      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
863      tree op = USE_FROM_PTR (op_p);
864
865      e = EDGE_PRED (bb, i);
866
867      if (op == NULL_TREE)
868	{
869	  error ("PHI argument is missing for edge %d->%d",
870	         e->src->index,
871		 e->dest->index);
872	  err = true;
873	  goto error;
874	}
875
876      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
877	{
878	  error ("PHI argument is not SSA_NAME, or invariant");
879	  err = true;
880	}
881
882      if (TREE_CODE (op) == SSA_NAME)
883	{
884	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
885	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
886			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
887	}
888
889      if (TREE_CODE (op) == ADDR_EXPR)
890	{
891	  tree base = TREE_OPERAND (op, 0);
892	  while (handled_component_p (base))
893	    base = TREE_OPERAND (base, 0);
894	  if ((TREE_CODE (base) == VAR_DECL
895	       || TREE_CODE (base) == PARM_DECL
896	       || TREE_CODE (base) == RESULT_DECL)
897	      && !TREE_ADDRESSABLE (base))
898	    {
899	      error ("address taken, but ADDRESSABLE bit not set");
900	      err = true;
901	    }
902	}
903
904      if (e->dest != bb)
905	{
906	  error ("wrong edge %d->%d for PHI argument",
907	         e->src->index, e->dest->index);
908	  err = true;
909	}
910
911      if (err)
912	{
913	  fprintf (stderr, "PHI argument\n");
914	  print_generic_stmt (stderr, op, TDF_VOPS);
915	  goto error;
916	}
917    }
918
919error:
920  if (err)
921    {
922      fprintf (stderr, "for PHI node\n");
923      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
924    }
925
926
927  return err;
928}
929
930
931/* Verify common invariants in the SSA web.
932   TODO: verify the variable annotations.  */
933
934DEBUG_FUNCTION void
935verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
936{
937  size_t i;
938  basic_block bb;
939  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
940  ssa_op_iter iter;
941  tree op;
942  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
943  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
944
945  gcc_assert (!need_ssa_update_p (cfun));
946
947  timevar_push (TV_TREE_SSA_VERIFY);
948
949  /* Keep track of SSA names present in the IL.  */
950  for (i = 1; i < num_ssa_names; i++)
951    {
952      tree name = ssa_name (i);
953      if (name)
954	{
955	  gimple stmt;
956	  TREE_VISITED (name) = 0;
957
958	  verify_ssa_name (name, virtual_operand_p (name));
959
960	  stmt = SSA_NAME_DEF_STMT (name);
961	  if (!gimple_nop_p (stmt))
962	    {
963	      basic_block bb = gimple_bb (stmt);
964	      if (verify_def (bb, definition_block,
965			      name, stmt, virtual_operand_p (name)))
966		goto err;
967	    }
968	}
969    }
970
971  calculate_dominance_info (CDI_DOMINATORS);
972
973  /* Now verify all the uses and make sure they agree with the definitions
974     found in the previous pass.  */
975  FOR_EACH_BB_FN (bb, cfun)
976    {
977      edge e;
978      edge_iterator ei;
979
980      /* Make sure that all edges have a clear 'aux' field.  */
981      FOR_EACH_EDGE (e, ei, bb->preds)
982	{
983	  if (e->aux)
984	    {
985	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
986		      e->dest->index);
987	      goto err;
988	    }
989	}
990
991      /* Verify the arguments for every PHI node in the block.  */
992      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
993	{
994	  gphi *phi = gsi.phi ();
995	  if (verify_phi_args (phi, bb, definition_block))
996	    goto err;
997
998	  bitmap_set_bit (names_defined_in_bb,
999			  SSA_NAME_VERSION (gimple_phi_result (phi)));
1000	}
1001
1002      /* Now verify all the uses and vuses in every statement of the block.  */
1003      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1004	   gsi_next (&gsi))
1005	{
1006	  gimple stmt = gsi_stmt (gsi);
1007	  use_operand_p use_p;
1008
1009	  if (check_modified_stmt && gimple_modified_p (stmt))
1010	    {
1011	      error ("stmt (%p) marked modified after optimization pass: ",
1012		     (void *)stmt);
1013	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1014	      goto err;
1015	    }
1016
1017	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1018	    {
1019	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1020	      goto err;
1021	    }
1022
1023	  if (gimple_debug_bind_p (stmt)
1024	      && !gimple_debug_bind_has_value_p (stmt))
1025	    continue;
1026
1027	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1028	    {
1029	      op = USE_FROM_PTR (use_p);
1030	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1031			      use_p, stmt, false, names_defined_in_bb))
1032		goto err;
1033	    }
1034
1035	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1036	    {
1037	      if (SSA_NAME_DEF_STMT (op) != stmt)
1038		{
1039		  error ("SSA_NAME_DEF_STMT is wrong");
1040		  fprintf (stderr, "Expected definition statement:\n");
1041		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1042		  fprintf (stderr, "\nActual definition statement:\n");
1043		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1044				     4, TDF_VOPS);
1045		  goto err;
1046		}
1047	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1048	    }
1049	}
1050
1051      bitmap_clear (names_defined_in_bb);
1052    }
1053
1054  free (definition_block);
1055
1056  /* Restore the dominance information to its prior known state, so
1057     that we do not perturb the compiler's subsequent behavior.  */
1058  if (orig_dom_state == DOM_NONE)
1059    free_dominance_info (CDI_DOMINATORS);
1060  else
1061    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1062
1063  BITMAP_FREE (names_defined_in_bb);
1064  timevar_pop (TV_TREE_SSA_VERIFY);
1065  return;
1066
1067err:
1068  internal_error ("verify_ssa failed");
1069}
1070
1071
1072/* Initialize global DFA and SSA structures.  */
1073
1074void
1075init_tree_ssa (struct function *fn)
1076{
1077  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1078  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1079  pt_solution_reset (&fn->gimple_df->escaped);
1080  init_ssanames (fn, 0);
1081}
1082
1083/* Do the actions required to initialize internal data structures used
1084   in tree-ssa optimization passes.  */
1085
1086static unsigned int
1087execute_init_datastructures (void)
1088{
1089  /* Allocate hash tables, arrays and other structures.  */
1090  gcc_assert (!cfun->gimple_df);
1091  init_tree_ssa (cfun);
1092  return 0;
1093}
1094
1095namespace {
1096
1097const pass_data pass_data_init_datastructures =
1098{
1099  GIMPLE_PASS, /* type */
1100  "*init_datastructures", /* name */
1101  OPTGROUP_NONE, /* optinfo_flags */
1102  TV_NONE, /* tv_id */
1103  PROP_cfg, /* properties_required */
1104  0, /* properties_provided */
1105  0, /* properties_destroyed */
1106  0, /* todo_flags_start */
1107  0, /* todo_flags_finish */
1108};
1109
1110class pass_init_datastructures : public gimple_opt_pass
1111{
1112public:
1113  pass_init_datastructures (gcc::context *ctxt)
1114    : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1115  {}
1116
1117  /* opt_pass methods: */
1118  virtual bool gate (function *fun)
1119    {
      /* Do nothing for functions that were already produced in SSA form.  */
1121      return !(fun->curr_properties & PROP_ssa);
1122    }
1123
1124  virtual unsigned int execute (function *)
1125    {
1126      return execute_init_datastructures ();
1127    }
1128
1129}; // class pass_init_datastructures
1130
1131} // anon namespace
1132
1133gimple_opt_pass *
1134make_pass_init_datastructures (gcc::context *ctxt)
1135{
1136  return new pass_init_datastructures (ctxt);
1137}
1138
/* Deallocate memory associated with SSA data structures for the current
   function.  */
1140
1141void
1142delete_tree_ssa (void)
1143{
1144  fini_ssanames ();
1145
1146  /* We no longer maintain the SSA operand cache at this point.  */
1147  if (ssa_operands_active (cfun))
1148    fini_ssa_operands (cfun);
1149
1150  cfun->gimple_df->default_defs->empty ();
1151  cfun->gimple_df->default_defs = NULL;
1152  pt_solution_reset (&cfun->gimple_df->escaped);
1153  if (cfun->gimple_df->decls_to_pointers != NULL)
1154    delete cfun->gimple_df->decls_to_pointers;
1155  cfun->gimple_df->decls_to_pointers = NULL;
1156  cfun->gimple_df->modified_noreturn_calls = NULL;
1157  cfun->gimple_df = NULL;
1158
1159  /* We no longer need the edge variable maps.  */
1160  redirect_edge_var_map_destroy ();
1161}
1162
1163/* Return true if EXPR is a useless type conversion, otherwise return
1164   false.  */
1165
1166bool
1167tree_ssa_useless_type_conversion (tree expr)
1168{
1169  /* If we have an assignment that merely uses a NOP_EXPR to change
1170     the top of the RHS to the type of the LHS and the type conversion
1171     is "safe", then strip away the type conversion so that we can
1172     enter LHS = RHS into the const_and_copies table.  */
1173  if (CONVERT_EXPR_P (expr)
1174      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1175      || TREE_CODE (expr) == NON_LVALUE_EXPR)
1176    return useless_type_conversion_p
1177      (TREE_TYPE (expr),
1178       TREE_TYPE (TREE_OPERAND (expr, 0)));
1179
1180  return false;
1181}
1182
1183/* Strip conversions from EXP according to
1184   tree_ssa_useless_type_conversion and return the resulting
1185   expression.  */
1186
1187tree
1188tree_ssa_strip_useless_type_conversions (tree exp)
1189{
1190  while (tree_ssa_useless_type_conversion (exp))
1191    exp = TREE_OPERAND (exp, 0);
1192  return exp;
1193}
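
/* For instance (illustrative only), if EXP is (int) i_2 and the type of
   i_2 is considered interchangeable with int by useless_type_conversion_p
   (say an enumeral type of the same precision and signedness),
   tree_ssa_strip_useless_type_conversions returns i_2 itself, while a
   narrowing conversion such as (char) i_2 is left untouched.  */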
1194
1195
1196/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
1197   should be returned if the value is only partially undefined.  */
1198
1199bool
1200ssa_undefined_value_p (tree t, bool partial)
1201{
1202  gimple def_stmt;
1203  tree var = SSA_NAME_VAR (t);
1204
1205  if (!var)
1206    ;
1207  /* Parameters get their initial value from the function entry.  */
1208  else if (TREE_CODE (var) == PARM_DECL)
1209    return false;
  /* When returning by reference, the address of the return object is
     actually a hidden parameter.  */
1212  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1213    return false;
1214  /* Hard register variables get their initial value from the ether.  */
1215  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1216    return false;
1217
1218  /* The value is undefined iff its definition statement is empty.  */
1219  def_stmt = SSA_NAME_DEF_STMT (t);
1220  if (gimple_nop_p (def_stmt))
1221    return true;
1222
1223  /* Check if the complex was not only partially defined.  */
1224  if (partial && is_gimple_assign (def_stmt)
1225      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1226    {
1227      tree rhs1, rhs2;
1228
1229      rhs1 = gimple_assign_rhs1 (def_stmt);
1230      rhs2 = gimple_assign_rhs2 (def_stmt);
1231      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1232	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1233    }
1234  return false;
1235}
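
/* As an example (hypothetical IL): the default definition i_1(D) of a
   local automatic variable has a GIMPLE_NOP defining statement and is
   therefore reported as undefined, while default definitions of
   PARM_DECLs, DECL_BY_REFERENCE RESULT_DECLs and hard-register variables
   are not.  With PARTIAL set,

     c_3 = COMPLEX_EXPR <r_2, i_1(D)>;

   is also reported, since one of its parts is undefined.  */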
1236
1237
1238/* If necessary, rewrite the base of the reference tree *TP from
1239   a MEM_REF to a plain or converted symbol.  */
1240
1241static void
1242maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1243{
1244  tree sym;
1245
1246  while (handled_component_p (*tp))
1247    tp = &TREE_OPERAND (*tp, 0);
1248  if (TREE_CODE (*tp) == MEM_REF
1249      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1250      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1251      && DECL_P (sym)
1252      && !TREE_ADDRESSABLE (sym)
1253      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1254    {
1255      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1256	  && useless_type_conversion_p (TREE_TYPE (*tp),
1257					TREE_TYPE (TREE_TYPE (sym)))
1258	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1259			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1260	{
1261	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1262			TYPE_SIZE (TREE_TYPE (*tp)),
1263			int_const_binop (MULT_EXPR,
1264					 bitsize_int (BITS_PER_UNIT),
1265					 TREE_OPERAND (*tp, 1)));
1266	}
1267      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1268	       && useless_type_conversion_p (TREE_TYPE (*tp),
1269					     TREE_TYPE (TREE_TYPE (sym))))
1270	{
1271	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1272			? REALPART_EXPR : IMAGPART_EXPR,
1273			TREE_TYPE (*tp), sym);
1274	}
1275      else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1276	{
1277	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
1278					  TREE_TYPE (sym)))
1279	    *tp = build1 (VIEW_CONVERT_EXPR,
1280			  TREE_TYPE (*tp), sym);
1281	  else
1282	    *tp = sym;
1283	}
1284    }
1285}
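
/* To illustrate the rewrites above (hypothetical IL; assume the decls
   are recorded in SUITABLE_FOR_RENAMING):

     MEM[(float *)&v + 8B]  ->  BIT_FIELD_REF <v, 32, 64>  (vector decl v)
     MEM[(float *)&c]       ->  REALPART_EXPR <c>          (complex decl c)
     MEM[(T *)&s]           ->  s  or  VIEW_CONVERT_EXPR<T>(s)

   depending on whether the access type matches the type of the decl or
   of its elements.  */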
1286
/* If the base of the reference tree REF is a decl accessed through a
   MEM_REF that cannot be rewritten into SSA form, return that decl.
   Otherwise return NULL_TREE.  */
1289
1290static tree
1291non_rewritable_mem_ref_base (tree ref)
1292{
1293  tree base = ref;
1294
  /* A plain decl is not accessed through a MEM_REF, so there is
     nothing to rewrite.  */
1296  if (DECL_P (ref))
1297    return NULL_TREE;
1298
1299  while (handled_component_p (base))
1300    base = TREE_OPERAND (base, 0);
1301
1302  /* But watch out for MEM_REFs we cannot lower to a
1303     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
1304  if (TREE_CODE (base) == MEM_REF
1305      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1306    {
1307      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1308      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1309	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1310	  && useless_type_conversion_p (TREE_TYPE (base),
1311					TREE_TYPE (TREE_TYPE (decl)))
1312	  && wi::fits_uhwi_p (mem_ref_offset (base))
1313	  && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1314			mem_ref_offset (base))
1315	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1316			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
1317	return NULL_TREE;
1318      if (DECL_P (decl)
1319	  && (!integer_zerop (TREE_OPERAND (base, 1))
1320	      || (DECL_SIZE (decl)
1321		  != TYPE_SIZE (TREE_TYPE (base)))
1322	      || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1323	return decl;
1324    }
1325
1326  return NULL_TREE;
1327}
1328
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */
1331
1332static bool
1333non_rewritable_lvalue_p (tree lhs)
1334{
1335  /* A plain decl is always rewritable.  */
1336  if (DECL_P (lhs))
1337    return false;
1338
1339  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1340     a reasonably efficient manner... */
1341  if ((TREE_CODE (lhs) == REALPART_EXPR
1342       || TREE_CODE (lhs) == IMAGPART_EXPR)
1343      && DECL_P (TREE_OPERAND (lhs, 0)))
1344    return false;
1345
  /* A decl that is wrapped inside a MEM_REF that covers
     it in full is also rewritable.
1348     ???  The following could be relaxed allowing component
1349     references that do not change the access size.  */
1350  if (TREE_CODE (lhs) == MEM_REF
1351      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1352      && integer_zerop (TREE_OPERAND (lhs, 1)))
1353    {
1354      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1355      if (DECL_P (decl)
1356	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1357	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1358	return false;
1359    }
1360
1361  return true;
1362}
1363
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit on
   VAR and record it in SUITABLE_FOR_RENAMING, marking it for conversion
   into SSA form.  */
1367
1368static void
1369maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1370		    bitmap suitable_for_renaming)
1371{
  /* Global variables, result decls and variables whose address has
     already been taken cannot be changed.  */
1373  if (is_global_var (var)
1374      || TREE_CODE (var) == RESULT_DECL
1375      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1376    return;
1377
1378  if (TREE_ADDRESSABLE (var)
1379      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1380	 a non-register.  Otherwise we are confused and forget to
1381	 add virtual operands for it.  */
1382      && (!is_gimple_reg_type (TREE_TYPE (var))
1383	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1384	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1385	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1386    {
1387      TREE_ADDRESSABLE (var) = 0;
1388      if (is_gimple_reg (var))
1389	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1390      if (dump_file)
1391	{
1392	  fprintf (dump_file, "No longer having address taken: ");
1393	  print_generic_expr (dump_file, var, 0);
1394	  fprintf (dump_file, "\n");
1395	}
1396    }
1397
1398  if (!DECL_GIMPLE_REG_P (var)
1399      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1400      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1401	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1402      && !TREE_THIS_VOLATILE (var)
1403      && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1404    {
1405      DECL_GIMPLE_REG_P (var) = 1;
1406      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1407      if (dump_file)
1408	{
1409	  fprintf (dump_file, "Now a gimple register: ");
1410	  print_generic_expr (dump_file, var, 0);
1411	  fprintf (dump_file, "\n");
1412	}
1413    }
1414}
1415
1416/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1417
1418void
1419execute_update_addresses_taken (void)
1420{
1421  basic_block bb;
1422  bitmap addresses_taken = BITMAP_ALLOC (NULL);
1423  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1424  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1425  tree var;
1426  unsigned i;
1427
1428  timevar_push (TV_ADDRESS_TAKEN);
1429
1430  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1431     the function body.  */
1432  FOR_EACH_BB_FN (bb, cfun)
1433    {
1434      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1435	   gsi_next (&gsi))
1436	{
1437	  gimple stmt = gsi_stmt (gsi);
1438	  enum gimple_code code = gimple_code (stmt);
1439	  tree decl;
1440
1441	  /* Note all addresses taken by the stmt.  */
1442	  gimple_ior_addresses_taken (addresses_taken, stmt);
1443
1444	  /* If we have a call or an assignment, see if the lhs contains
1445	     a local decl that requires not to be a gimple register.  */
1446	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1447	    {
1448              tree lhs = gimple_get_lhs (stmt);
1449              if (lhs
1450		  && TREE_CODE (lhs) != SSA_NAME
1451		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1452		      || non_rewritable_lvalue_p (lhs)))
1453		{
1454		  decl = get_base_address (lhs);
1455		  if (DECL_P (decl))
1456		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1457                }
1458	    }
1459
1460	  if (gimple_assign_single_p (stmt))
1461	    {
1462	      tree rhs = gimple_assign_rhs1 (stmt);
1463	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1464		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1465	    }
1466
1467	  else if (code == GIMPLE_CALL)
1468	    {
1469	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1470		{
1471		  tree arg = gimple_call_arg (stmt, i);
1472		  if ((decl = non_rewritable_mem_ref_base (arg)))
1473		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1474		}
1475	    }
1476
1477	  else if (code == GIMPLE_ASM)
1478	    {
1479	      gasm *asm_stmt = as_a <gasm *> (stmt);
1480	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1481		{
1482		  tree link = gimple_asm_output_op (asm_stmt, i);
1483		  tree lhs = TREE_VALUE (link);
1484		  if (TREE_CODE (lhs) != SSA_NAME)
1485		    {
1486		      decl = get_base_address (lhs);
1487		      if (DECL_P (decl)
1488			  && (non_rewritable_lvalue_p (lhs)
1489			      /* We cannot move required conversions from
1490				 the lhs to the rhs in asm statements, so
1491				 require we do not need any.  */
1492			      || !useless_type_conversion_p
1493			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
1494			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1495		    }
1496		}
1497	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1498		{
1499		  tree link = gimple_asm_input_op (asm_stmt, i);
1500		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1501		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1502		}
1503	    }
1504	}
1505
1506      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1507	   gsi_next (&gsi))
1508	{
1509	  size_t i;
1510	  gphi *phi = gsi.phi ();
1511
1512	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1513	    {
1514	      tree op = PHI_ARG_DEF (phi, i), var;
1515	      if (TREE_CODE (op) == ADDR_EXPR
1516		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1517		  && DECL_P (var))
1518		bitmap_set_bit (addresses_taken, DECL_UID (var));
1519	    }
1520	}
1521    }
1522
1523  /* We cannot iterate over all referenced vars because that can contain
1524     unused vars from BLOCK trees, which causes code generation differences
1525     for -g vs. -g0.  */
1526  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1527    maybe_optimize_var (var, addresses_taken, not_reg_needs,
1528			suitable_for_renaming);
1529
1530  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1531    maybe_optimize_var (var, addresses_taken, not_reg_needs,
1532			suitable_for_renaming);
1533
1534  /* Operand caches need to be recomputed for operands referencing the updated
1535     variables and operands need to be rewritten to expose bare symbols.  */
1536  if (!bitmap_empty_p (suitable_for_renaming))
1537    {
1538      FOR_EACH_BB_FN (bb, cfun)
1539	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1540	  {
1541	    gimple stmt = gsi_stmt (gsi);
1542
	    /* Re-write MEM_REFs of symbols we want to
	       rewrite into SSA form.  */
1545	    if (gimple_assign_single_p (stmt))
1546	      {
1547		tree lhs = gimple_assign_lhs (stmt);
1548		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1549		tree sym;
1550
1551		/* Rewrite LHS IMAG/REALPART_EXPR similar to
1552		   gimplify_modify_expr_complex_part.  */
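		/* E.g. (hypothetical IL), a store to one part of a
		   complex decl c that we want to rewrite,

		     REALPART_EXPR <c> = x_1;

		   becomes a load of the other part followed by a store
		   of the whole object,

		     other_2 = IMAGPART_EXPR <c>;
		     c = COMPLEX_EXPR <x_1, other_2>;

		   so that c itself can become a gimple register.  */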
1553		if ((TREE_CODE (lhs) == IMAGPART_EXPR
1554		     || TREE_CODE (lhs) == REALPART_EXPR)
1555		    && DECL_P (TREE_OPERAND (lhs, 0))
1556		    && bitmap_bit_p (suitable_for_renaming,
1557				     DECL_UID (TREE_OPERAND (lhs, 0))))
1558		  {
1559		    tree other = make_ssa_name (TREE_TYPE (lhs));
1560		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1561					? REALPART_EXPR : IMAGPART_EXPR,
1562					TREE_TYPE (other),
1563					TREE_OPERAND (lhs, 0));
1564		    gimple load = gimple_build_assign (other, lrhs);
1565		    location_t loc = gimple_location (stmt);
1566		    gimple_set_location (load, loc);
1567		    gimple_set_vuse (load, gimple_vuse (stmt));
1568		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1569		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1570		    gimple_assign_set_rhs_with_ops
1571		      (&gsi, COMPLEX_EXPR,
1572		       TREE_CODE (lhs) == IMAGPART_EXPR
1573		       ? other : gimple_assign_rhs1 (stmt),
1574		       TREE_CODE (lhs) == IMAGPART_EXPR
1575		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1576		    stmt = gsi_stmt (gsi);
1577		    unlink_stmt_vdef (stmt);
1578		    update_stmt (stmt);
1579		    continue;
1580		  }
1581
1582		/* We shouldn't have any fancy wrapping of
1583		   component-refs on the LHS, but look through
1584		   VIEW_CONVERT_EXPRs as that is easy.  */
1585		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1586		  lhs = TREE_OPERAND (lhs, 0);
1587		if (TREE_CODE (lhs) == MEM_REF
1588		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1589		    && integer_zerop (TREE_OPERAND (lhs, 1))
1590		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1591		    && DECL_P (sym)
1592		    && !TREE_ADDRESSABLE (sym)
1593		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1594		  lhs = sym;
1595		else
1596		  lhs = gimple_assign_lhs (stmt);
1597
1598		/* Rewrite the RHS and make sure the resulting assignment
1599		   is validly typed.  */
1600		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1601		rhs = gimple_assign_rhs1 (stmt);
1602		if (gimple_assign_lhs (stmt) != lhs
1603		    && !useless_type_conversion_p (TREE_TYPE (lhs),
1604						   TREE_TYPE (rhs)))
1605		  rhs = fold_build1 (VIEW_CONVERT_EXPR,
1606				     TREE_TYPE (lhs), rhs);
1607
1608		if (gimple_assign_lhs (stmt) != lhs)
1609		  gimple_assign_set_lhs (stmt, lhs);
1610
1611		if (gimple_assign_rhs1 (stmt) != rhs)
1612		  {
1613		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1614		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
1615		  }
1616	      }
1617
1618	    else if (gimple_code (stmt) == GIMPLE_CALL)
1619	      {
1620		unsigned i;
1621		for (i = 0; i < gimple_call_num_args (stmt); ++i)
1622		  {
1623		    tree *argp = gimple_call_arg_ptr (stmt, i);
1624		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1625		  }
1626	      }
1627
1628	    else if (gimple_code (stmt) == GIMPLE_ASM)
1629	      {
1630		gasm *asm_stmt = as_a <gasm *> (stmt);
1631		unsigned i;
1632		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1633		  {
1634		    tree link = gimple_asm_output_op (asm_stmt, i);
1635		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1636						suitable_for_renaming);
1637		  }
1638		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1639		  {
1640		    tree link = gimple_asm_input_op (asm_stmt, i);
1641		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1642						suitable_for_renaming);
1643		  }
1644	      }
1645
1646	    else if (gimple_debug_bind_p (stmt)
1647		     && gimple_debug_bind_has_value_p (stmt))
1648	      {
1649		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1650		tree decl;
1651		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1652		decl = non_rewritable_mem_ref_base (*valuep);
1653		if (decl
1654		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1655		  gimple_debug_bind_reset_value (stmt);
1656	      }
1657
1658	    if (gimple_references_memory_p (stmt)
1659		|| is_gimple_debug (stmt))
1660	      update_stmt (stmt);
1661
1662	    gsi_next (&gsi);
1663	  }
1664
      /* Update SSA form here; we are also called outside of a pass.  */
1666      if (number_of_loops (cfun) > 1
1667	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1668	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1669      else
1670	update_ssa (TODO_update_ssa);
1671    }
1672
1673  BITMAP_FREE (not_reg_needs);
1674  BITMAP_FREE (addresses_taken);
1675  BITMAP_FREE (suitable_for_renaming);
1676  timevar_pop (TV_ADDRESS_TAKEN);
1677}
1678
1679namespace {
1680
1681const pass_data pass_data_update_address_taken =
1682{
1683  GIMPLE_PASS, /* type */
1684  "addressables", /* name */
1685  OPTGROUP_NONE, /* optinfo_flags */
1686  TV_ADDRESS_TAKEN, /* tv_id */
1687  PROP_ssa, /* properties_required */
1688  0, /* properties_provided */
1689  0, /* properties_destroyed */
1690  0, /* todo_flags_start */
1691  TODO_update_address_taken, /* todo_flags_finish */
1692};
1693
1694class pass_update_address_taken : public gimple_opt_pass
1695{
1696public:
1697  pass_update_address_taken (gcc::context *ctxt)
1698    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1699  {}
1700
1701  /* opt_pass methods: */
1702
1703}; // class pass_update_address_taken
1704
1705} // anon namespace
1706
1707gimple_opt_pass *
1708make_pass_update_address_taken (gcc::context *ctxt)
1709{
1710  return new pass_update_address_taken (ctxt);
1711}
1712