/* SSA operands management for trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stmt.h"
#include "print-tree.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "langhooks.h"
#include "diagnostic-core.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: defs, uses, virtual uses, and virtual may-defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
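
/* As an editorial illustration (not part of the pass sources), consider

     a = *p + b;

   Assuming 'a' and 'b' are GIMPLE registers and '*p' is a memory
   access, scanning this statement produces a real DEF of 'a', real
   USEs of 'p' and 'b', and a VUSE of the virtual operand for the load
   from '*p'.  A store such as

     *p = a;

   instead produces real USEs of 'p' and 'a' plus a VDEF (and its
   implied VUSE) of the virtual operand.  */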


/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   the compilation of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (struct function *, gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}
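
/* A minimal usage sketch (editorial; FN and STMT stand for hypothetical
   locals of some pass):

     if (ssa_operands_active (fn))
       update_stmt_operands (fn, stmt);

   Most passes instead go through update_stmt () from gimple-ssa.h,
   which reaches update_stmt_operands below; that routine performs
   this same check and returns early when the cache is inactive.  */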


/* Create the VOP variable, an artificial global variable to act as a
   representative of the FUD chain of all virtual operands.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  DECL_IGNORED_P (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

/* These are the sizes, in bytes, of the operand memory buffers that get
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8-byte pointers; that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
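
/* To make the size comment above concrete (an editorial back-of-the-
   envelope check, assuming no padding and 8-byte pointers): a struct
   use_optype_d is a next link plus the four pointer-sized fields of
   its embedded ssa_use_operand_t, i.e. 5 * 8 = 40 bytes, so OP_SIZE_1
   yields (1024 - 8) / 40 = 25 use operands per allocated chunk.  */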

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (struct function *fn)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (fn)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
    {
      gimple_ssa_operands (fn)->operand_memory
	= gimple_ssa_operands (fn)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (fn)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  fn->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (struct function *fn)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (fn)->free_uses)
    {
      ret = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses
	= gimple_ssa_operands (fn)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
          ssa_operand_alloc (fn, sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (struct function *fn, gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use (fn);
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}


/* Take the built VDEF (if any) and turn it into the virtual def
   operand of STMT, reusing the previous operand when possible.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create use operands for all the new uses.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Reset the built VDEF and VUSE and empty the build array of uses.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors, fill the new ones into STMT.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple stmt)
{
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up the operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (struct function *fn,
		     gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (fn));
  else
    append_vuse (gimple_vop (fn));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (struct function *fn, tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	fn->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (fn, stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
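
/* For example, given

     p = &a.b;

   the base address of the reference 'a.b' is the declaration 'a', so
   all of 'a' becomes TREE_ADDRESSABLE, not just the field 'b'.
   (Editorial example; see the PR 21407 reference above.)  */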


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
      that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_mem_ref_operands (struct function *fn,
		      gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (fn, stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (fn, stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (struct function *fn, gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (fn, stmt,
		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (fn, stmt,
		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (fn, stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (struct function *fn, gcall *stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST)))
	add_virtual_operand (fn, stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (fn, stmt, opf_use);
    }
}
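
/* Summarizing the logic above (editorial note): ECF_NOVOPS calls get
   no virtual operands at all; ECF_CONST calls fall through both tests
   and also get none; ECF_PURE calls may read but not write memory and
   so get only a VUSE; every other call gets a VDEF, which implies a
   VUSE as well (see append_vdef).  */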


/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
	                       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
	                      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}
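
/* As an editorial sketch, for

     asm volatile ("" : "=m" (x) : "m" (y) : "memory");

   both 'x' and 'y' are memory operands, so each is marked address-taken;
   scanning the output with opf_def yields a VDEF, the input yields a
   VUSE, and the "memory" clobber requests another VDEF via
   gimple_asm_clobbers_memory_p.  */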


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P; transfer
	   the volatility to the statement.  We do not test TREE_CLOBBER_P
	   here so as to mirror the other uses of THIS_VOLATILE in this
	   file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}


/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (struct function *fn, gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (struct function *fn, gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (struct function *fn, gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Get the operands of statement STMT.  */

void
update_stmt_operands (struct function *fn, gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
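
/* The typical caller pattern is a sketch like

     gimple_assign_set_rhs1 (stmt, new_rhs);
     update_stmt (stmt);

   where update_stmt (from gimple-ssa.h) marks STMT modified and then
   reaches update_stmt_operands above; new_rhs is hypothetical.  */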


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
         operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
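
/* A common use is canonicalizing commutative operands, roughly
   (an editorial sketch; tree_swap_operands_p is from fold-const.c):

     if (tree_swap_operands_p (gimple_assign_rhs1 (stmt),
			       gimple_assign_rhs2 (stmt), false))
       swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			  gimple_assign_rhs2_ptr (stmt));  */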


/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else
	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
	else
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
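
/* The classic caller pattern removes a dead store (a sketch; GSI is a
   hypothetical gimple_stmt_iterator pointing at STMT, release_defs is
   from tree-ssanames.h):

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);

   Propagating the VUSE into the VDEF's uses first keeps the virtual
   FUD chain intact across the removal.  */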


/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}


/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}

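
/* These helpers back the public has_zero_uses () and single_imm_use ()
   inlines in ssa-iterators.h, which fall back to them when debug
   statements may appear in the use lists.  A caller sketch:

     use_operand_p use_p;
     gimple use_stmt;
     if (single_imm_use (name, &use_p, &use_stmt))
       ...  (exactly one nondebug use; use_stmt is its statement)

   where 'name' is some SSA_NAME of interest.  */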