1169689Skan/* SSA operands management for trees.
2169689Skan   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3169689Skan
4169689SkanThis file is part of GCC.
5169689Skan
6169689SkanGCC is free software; you can redistribute it and/or modify
7169689Skanit under the terms of the GNU General Public License as published by
8169689Skanthe Free Software Foundation; either version 2, or (at your option)
9169689Skanany later version.
10169689Skan
11169689SkanGCC is distributed in the hope that it will be useful,
12169689Skanbut WITHOUT ANY WARRANTY; without even the implied warranty of
13169689SkanMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14169689SkanGNU General Public License for more details.
15169689Skan
16169689SkanYou should have received a copy of the GNU General Public License
17169689Skanalong with GCC; see the file COPYING.  If not, write to
18169689Skanthe Free Software Foundation, 51 Franklin Street, Fifth Floor,
19169689SkanBoston, MA 02110-1301, USA.  */
20169689Skan
21169689Skan#include "config.h"
22169689Skan#include "system.h"
23169689Skan#include "coretypes.h"
24169689Skan#include "tm.h"
25169689Skan#include "tree.h"
26169689Skan#include "flags.h"
27169689Skan#include "function.h"
28169689Skan#include "diagnostic.h"
29169689Skan#include "tree-flow.h"
30169689Skan#include "tree-inline.h"
31169689Skan#include "tree-pass.h"
32169689Skan#include "ggc.h"
33169689Skan#include "timevar.h"
34169689Skan#include "toplev.h"
35169689Skan#include "langhooks.h"
36169689Skan#include "ipa-reference.h"
37169689Skan
38169689Skan/* This file contains the code required to manage the operands cache of the
39169689Skan   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
40169689Skan   annotation.  This cache contains operands that will be of interest to
41169689Skan   optimizers and other passes wishing to manipulate the IL.
42169689Skan
43169689Skan   The operand type are broken up into REAL and VIRTUAL operands.  The real
44169689Skan   operands are represented as pointers into the stmt's operand tree.  Thus
45169689Skan   any manipulation of the real operands will be reflected in the actual tree.
46169689Skan   Virtual operands are represented solely in the cache, although the base
47169689Skan   variable for the SSA_NAME may, or may not occur in the stmt's tree.
48169689Skan   Manipulation of the virtual operands will not be reflected in the stmt tree.
49169689Skan
50169689Skan   The routines in this file are concerned with creating this operand cache
51169689Skan   from a stmt tree.
52169689Skan
   The operand tree is then parsed by the various get_* routines which look
54169689Skan   through the stmt tree for the occurrence of operands which may be of
55169689Skan   interest, and calls are made to the append_* routines whenever one is
56169689Skan   found.  There are 5 of these routines, each representing one of the
57169689Skan   5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
58169689Skan   Virtual Must Defs.
59169689Skan
60169689Skan   The append_* routines check for duplication, and simply keep a list of
61169689Skan   unique objects for each operand type in the build_* extendable vectors.
62169689Skan
63169689Skan   Once the stmt tree is completely parsed, the finalize_ssa_operands()
64169689Skan   routine is called, which proceeds to perform the finalization routine
65169689Skan   on each of the 5 operand vectors which have been built up.
66169689Skan
67169689Skan   If the stmt had a previous operand cache, the finalization routines
68169689Skan   attempt to match up the new operands with the old ones.  If it's a perfect
69169689Skan   match, the old vector is simply reused.  If it isn't a perfect match, then
70169689Skan   a new vector is created and the new operands are placed there.  For
71169689Skan   virtual operands, if the previous cache had SSA_NAME version of a
72169689Skan   variable, and that same variable occurs in the same operands cache, then
73169689Skan   the new cache vector will also get the same SSA_NAME.
74169689Skan
75169689Skan  i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76169689Skan  vector for VUSE, then the new vector will also be modified such that
77169689Skan  it contains 'a_5' rather than 'a'.  */
78169689Skan
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   MODIFY_EXPRs.  */
#define opf_non_specific  (1 << 3)

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;

/* True while the operand cache machinery is initialized, i.e. between
   init_ssa_operands and fini_ssa_operands (see ssa_operands_active).
   NOTE(review): the previous comment here described cached operand
   vectors for call-clobbered calls, which do not appear in this file;
   replaced with what the code actually shows.  */
static bool ops_active = false;

/* Chain of GC-allocated chunks from which operand nodes are carved by
   ssa_operand_alloc; OPERAND_MEMORY_INDEX is the offset of the first
   free byte in the head chunk.  */
static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;

static void get_expr_operands (tree, tree *, int);

/* Per-type free lists of recycled operand nodes, refilled by the
   finalize_* routines and consumed by ALLOC_OPTYPE.  */
static def_optype_p free_defs = NULL;
static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static maydef_optype_p free_maydefs = NULL;
static mustdef_optype_p free_mustdefs = NULL;
132169689Skan
133169689Skan/* Allocates operand OP of given TYPE from the appropriate free list,
134169689Skan   or of the new value if the list is empty.  */
135169689Skan
/* Allocates operand OP of given TYPE from the appropriate free list,
   or of the new value if the list is empty.  New nodes come from the
   chunked allocator ssa_operand_alloc.  TYPE is only token-pasted, so
   it must be one of: def, use, vuse, maydef, mustdef.  */

#define ALLOC_OPTYPE(OP, TYPE)				\
  do							\
    {							\
      TYPE##_optype_p ret = free_##TYPE##s;		\
      if (ret)						\
	free_##TYPE##s = ret->next;			\
      else						\
	ret = ssa_operand_alloc (sizeof (*ret));	\
      (OP) = ret;					\
    } while (0)
146169689Skan
147169689Skan/* Return the DECL_UID of the base variable of T.  */
148169689Skan
149169689Skanstatic inline unsigned
150169689Skanget_name_decl (tree t)
151169689Skan{
152169689Skan  if (TREE_CODE (t) != SSA_NAME)
153169689Skan    return DECL_UID (t);
154169689Skan  else
155169689Skan    return DECL_UID (SSA_NAME_VAR (t));
156169689Skan}
157169689Skan
158169689Skan
159169689Skan/* Comparison function for qsort used in operand_build_sort_virtual.  */
160169689Skan
161169689Skanstatic int
162169689Skanoperand_build_cmp (const void *p, const void *q)
163169689Skan{
164169689Skan  tree e1 = *((const tree *)p);
165169689Skan  tree e2 = *((const tree *)q);
166169689Skan  unsigned int u1,u2;
167169689Skan
168169689Skan  u1 = get_name_decl (e1);
169169689Skan  u2 = get_name_decl (e2);
170169689Skan
171169689Skan  /* We want to sort in ascending order.  They can never be equal.  */
172169689Skan#ifdef ENABLE_CHECKING
173169689Skan  gcc_assert (u1 != u2);
174169689Skan#endif
175169689Skan  return (u1 > u2 ? 1 : -1);
176169689Skan}
177169689Skan
178169689Skan
179169689Skan/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */
180169689Skan
181169689Skanstatic inline void
182169689Skanoperand_build_sort_virtual (VEC(tree,heap) *list)
183169689Skan{
184169689Skan  int num = VEC_length (tree, list);
185169689Skan
186169689Skan  if (num < 2)
187169689Skan    return;
188169689Skan
189169689Skan  if (num == 2)
190169689Skan    {
191169689Skan      if (get_name_decl (VEC_index (tree, list, 0))
192169689Skan	  > get_name_decl (VEC_index (tree, list, 1)))
193169689Skan	{
194169689Skan	  /* Swap elements if in the wrong order.  */
195169689Skan	  tree tmp = VEC_index (tree, list, 0);
196169689Skan	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
197169689Skan	  VEC_replace (tree, list, 1, tmp);
198169689Skan	}
199169689Skan      return;
200169689Skan    }
201169689Skan
202169689Skan  /* There are 3 or more elements, call qsort.  */
203169689Skan  qsort (VEC_address (tree, list),
204169689Skan	 VEC_length (tree, list),
205169689Skan	 sizeof (tree),
206169689Skan	 operand_build_cmp);
207169689Skan}
208169689Skan
209169689Skan
210169689Skan/*  Return true if the SSA operands cache is active.  */
211169689Skan
212169689Skanbool
213169689Skanssa_operands_active (void)
214169689Skan{
215169689Skan  return ops_active;
216169689Skan}
217169689Skan
218169689Skan
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */
221169689Skan
static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;  /* Zeroed in init_ssa_operands; dumped (with TDF_STATS)
		     in fini_ssa_operands.  */
246169689Skan
247169689Skan
248169689Skan/* Initialize the operand cache routines.  */
249169689Skan
250169689Skanvoid
251169689Skaninit_ssa_operands (void)
252169689Skan{
253169689Skan  build_defs = VEC_alloc (tree, heap, 5);
254169689Skan  build_uses = VEC_alloc (tree, heap, 10);
255169689Skan  build_vuses = VEC_alloc (tree, heap, 25);
256169689Skan  build_v_may_defs = VEC_alloc (tree, heap, 25);
257169689Skan  build_v_must_defs = VEC_alloc (tree, heap, 25);
258169689Skan
259169689Skan  gcc_assert (operand_memory == NULL);
260169689Skan  operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
261169689Skan  ops_active = true;
262169689Skan  memset (&clobber_stats, 0, sizeof (clobber_stats));
263169689Skan}
264169689Skan
265169689Skan
266169689Skan/* Dispose of anything required by the operand routines.  */
267169689Skan
268169689Skanvoid
269169689Skanfini_ssa_operands (void)
270169689Skan{
271169689Skan  struct ssa_operand_memory_d *ptr;
272169689Skan  VEC_free (tree, heap, build_defs);
273169689Skan  VEC_free (tree, heap, build_uses);
274169689Skan  VEC_free (tree, heap, build_v_must_defs);
275169689Skan  VEC_free (tree, heap, build_v_may_defs);
276169689Skan  VEC_free (tree, heap, build_vuses);
277169689Skan  free_defs = NULL;
278169689Skan  free_uses = NULL;
279169689Skan  free_vuses = NULL;
280169689Skan  free_maydefs = NULL;
281169689Skan  free_mustdefs = NULL;
282169689Skan  while ((ptr = operand_memory) != NULL)
283169689Skan    {
284169689Skan      operand_memory = operand_memory->next;
285169689Skan      ggc_free (ptr);
286169689Skan    }
287169689Skan
288169689Skan  ops_active = false;
289169689Skan
290169689Skan  if (dump_file && (dump_flags & TDF_STATS))
291169689Skan    {
292169689Skan      fprintf (dump_file, "Original clobbered vars:%d\n",
293169689Skan	       clobber_stats.clobbered_vars);
294169689Skan      fprintf (dump_file, "Static write clobbers avoided:%d\n",
295169689Skan	       clobber_stats.static_write_clobbers_avoided);
296169689Skan      fprintf (dump_file, "Static read clobbers avoided:%d\n",
297169689Skan	       clobber_stats.static_read_clobbers_avoided);
298169689Skan      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299169689Skan	       clobber_stats.unescapable_clobbers_avoided);
300169689Skan      fprintf (dump_file, "Original read-only clobbers:%d\n",
301169689Skan	       clobber_stats.readonly_clobbers);
302169689Skan      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303169689Skan	       clobber_stats.static_readonly_clobbers_avoided);
304169689Skan    }
305169689Skan}
306169689Skan
307169689Skan
308169689Skan/* Return memory for operands of SIZE chunks.  */
309169689Skan
310169689Skanstatic inline void *
311169689Skanssa_operand_alloc (unsigned size)
312169689Skan{
313169689Skan  char *ptr;
314169689Skan  if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
315169689Skan    {
316169689Skan      struct ssa_operand_memory_d *ptr;
317169689Skan      ptr = GGC_NEW (struct ssa_operand_memory_d);
318169689Skan      ptr->next = operand_memory;
319169689Skan      operand_memory = ptr;
320169689Skan      operand_memory_index = 0;
321169689Skan    }
322169689Skan  ptr = &(operand_memory->mem[operand_memory_index]);
323169689Skan  operand_memory_index += size;
324169689Skan  return ptr;
325169689Skan}
326169689Skan
327169689Skan
328169689Skan
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  Used when reusing an
   old virtual operand node for a statement being re-finalized.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /*  fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  A NULL
     prev pointer marks a node that is currently delinked.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
343169689Skan
/* Appends ELT after TO, and moves the TO pointer to ELT.  TO is always
   the tail of the operand list under construction.  */

#define APPEND_OP_AFTER(ELT, TO)	\
  do					\
    {					\
      (TO)->next = (ELT);		\
      (TO) = (ELT);			\
    } while (0)

/* Appends head of list FROM after TO, and move both pointers
   to their successors.  Used to splice a reusable old node into the
   new list being built.  */

#define MOVE_HEAD_AFTER(FROM, TO)	\
  do					\
    {					\
      APPEND_OP_AFTER (FROM, TO);	\
      (FROM) = (FROM)->next;		\
    } while (0)

/* Moves OP to appropriate freelist.  OP is set to its successor.  The
   freelist is selected by token-pasting TYPE (def, use, vuse, maydef,
   mustdef), matching ALLOC_OPTYPE.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)			\
  do							\
    {							\
      TYPE##_optype_p next = (OP)->next;		\
      (OP)->next = free_##TYPE##s;			\
      free_##TYPE##s = (OP);				\
      (OP) = next;					\
    } while (0)

/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  VAL is evaluated
   twice, so it must be side-effect free.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)		\
  do							\
    {							\
      (USE_PTR)->use = (VAL);				\
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));	\
    } while (0)
383169689Skan
384169689Skan/* Adds OP to the list of defs after LAST, and moves
385169689Skan   LAST to the new element.  */
386169689Skan
387169689Skanstatic inline void
388169689Skanadd_def_op (tree *op, def_optype_p *last)
389169689Skan{
390169689Skan  def_optype_p new;
391169689Skan
392169689Skan  ALLOC_OPTYPE (new, def);
393169689Skan  DEF_OP_PTR (new) = op;
394169689Skan  APPEND_OP_AFTER (new, *last);
395169689Skan}
396169689Skan
397169689Skan/* Adds OP to the list of uses of statement STMT after LAST, and moves
398169689Skan   LAST to the new element.  */
399169689Skan
400169689Skanstatic inline void
401169689Skanadd_use_op (tree stmt, tree *op, use_optype_p *last)
402169689Skan{
403169689Skan  use_optype_p new;
404169689Skan
405169689Skan  ALLOC_OPTYPE (new, use);
406169689Skan  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
407169689Skan  APPEND_OP_AFTER (new, *last);
408169689Skan}
409169689Skan
410169689Skan/* Adds OP to the list of vuses of statement STMT after LAST, and moves
411169689Skan   LAST to the new element.  */
412169689Skan
413169689Skanstatic inline void
414169689Skanadd_vuse_op (tree stmt, tree op, vuse_optype_p *last)
415169689Skan{
416169689Skan  vuse_optype_p new;
417169689Skan
418169689Skan  ALLOC_OPTYPE (new, vuse);
419169689Skan  VUSE_OP (new) = op;
420169689Skan  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
421169689Skan  APPEND_OP_AFTER (new, *last);
422169689Skan}
423169689Skan
424169689Skan/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
425169689Skan   LAST to the new element.  */
426169689Skan
427169689Skanstatic inline void
428169689Skanadd_maydef_op (tree stmt, tree op, maydef_optype_p *last)
429169689Skan{
430169689Skan  maydef_optype_p new;
431169689Skan
432169689Skan  ALLOC_OPTYPE (new, maydef);
433169689Skan  MAYDEF_RESULT (new) = op;
434169689Skan  MAYDEF_OP (new) = op;
435169689Skan  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
436169689Skan  APPEND_OP_AFTER (new, *last);
437169689Skan}
438169689Skan
439169689Skan/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
440169689Skan   LAST to the new element.  */
441169689Skan
442169689Skanstatic inline void
443169689Skanadd_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
444169689Skan{
445169689Skan  mustdef_optype_p new;
446169689Skan
447169689Skan  ALLOC_OPTYPE (new, mustdef);
448169689Skan  MUSTDEF_RESULT (new) = op;
449169689Skan  MUSTDEF_KILL (new) = op;
450169689Skan  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
451169689Skan  APPEND_OP_AFTER (new, *last);
452169689Skan}
453169689Skan
/* Takes elements from build_defs and turns them into def operands of STMT.
   The existing DEF_OPS list is merged against the build vector so that
   nodes whose operand pointer still occurs are reused in place.
   TODO -- Given that def operands list is not necessarily sorted, merging
	   the operands this way does not make much sense.
	-- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;	/* Dummy head node for the list built below.  */
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  /* Two-finger merge of the old list and the build vector, both keyed
     by the address of the operand pointer.
     NOTE(review): ordering unrelated pointers with '<' is formally
     unspecified in ISO C; this code relies on it regardless.  */
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
        {
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  */
	  MOVE_HEAD_TO_FREELIST (old_ops, def);
	}
      else
	{
	  /* This is a new operand.  */
	  add_def_op (new_base, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  The whole remaining
     chain is pushed onto the def free list at once.  */
  if (old_ops)
    {
      old_ops->next = free_defs;
      free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  /* Verify the emitted list has exactly as many nodes as the build
     vector had entries.  */
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}
524169689Skan
525169689Skan/* This routine will create stmt operands for STMT from the def build list.  */
526169689Skan
527169689Skanstatic void
528169689Skanfinalize_ssa_defs (tree stmt)
529169689Skan{
530169689Skan  unsigned int num = VEC_length (tree, build_defs);
531169689Skan
532169689Skan  /* There should only be a single real definition per assignment.  */
533169689Skan  gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
534169689Skan
535169689Skan  /* If there is an old list, often the new list is identical, or close, so
536169689Skan     find the elements at the beginning that are the same as the vector.  */
537169689Skan  finalize_ssa_def_ops (stmt);
538169689Skan  VEC_truncate (tree, build_defs, 0);
539169689Skan}
540169689Skan
/* Takes elements from build_uses and turns them into use operands of STMT.
   Unlike the def/vuse finalizers, no merging is attempted: the old list
   is recycled wholesale and a fresh list is built.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;	/* Dummy head node for the list built below.  */
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  Each node must be
     delinked from the immediate-use chain before recycling.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = free_uses;
      free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  /* Verify the emitted list has exactly as many nodes as the build
     vector had entries.  */
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
584169689Skan
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.
   Rebuilds USE_OPS (stmt) from build_uses and empties the build vector.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}
606169689Skan
607169689Skan
/* Takes elements from build_v_may_defs and turns them into maydef operands of
   STMT.  The old MAYDEF_OPS list is merged against the build vector so
   that nodes for the same base variable (hence the same SSA version)
   are reused.  Both sequences are ordered by base DECL_UID, which is
   what makes the two-finger merge below valid.  */

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;  /* Dummy head node for the new list.  */
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
        {
	  /* if variables are the same, reuse this node.  The reused
	     node keeps its SSA_NAME, preserving versions across
	     re-finalization.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  It must
	     be delinked from the immediate-use chain first.  */
	  delink_imm_use (MAYDEF_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, maydef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_maydef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_may_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = free_maydefs;
      free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  /* Verify the emitted list has exactly as many nodes as the build
     vector had entries.  */
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}
681169689Skan
682169689Skanstatic void
683169689Skanfinalize_ssa_v_may_defs (tree stmt)
684169689Skan{
685169689Skan  finalize_ssa_v_may_def_ops (stmt);
686169689Skan}
687169689Skan
688169689Skan
689169689Skan/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */
690169689Skan
691169689Skanstatic inline void
692169689Skancleanup_v_may_defs (void)
693169689Skan{
694169689Skan  unsigned x, num;
695169689Skan  num = VEC_length (tree, build_v_may_defs);
696169689Skan
697169689Skan  for (x = 0; x < num; x++)
698169689Skan    {
699169689Skan      tree t = VEC_index (tree, build_v_may_defs, x);
700169689Skan      if (TREE_CODE (t) != SSA_NAME)
701169689Skan	{
702169689Skan	  var_ann_t ann = var_ann (t);
703169689Skan	  ann->in_v_may_def_list = 0;
704169689Skan	}
705169689Skan    }
706169689Skan  VEC_truncate (tree, build_v_may_defs, 0);
707169689Skan}
708169689Skan
709169689Skan
/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  The old VUSE_OPS list is merged against the build vector, both
   ordered by base DECL_UID, reusing nodes for matching base variables so
   their SSA versions are preserved.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;  /* Dummy head node for the new list.  */
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
        {
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (VUSE_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.
	     NOTE(review): USE_OP_PTR is used here while the cleanup loop
	     below uses VUSE_OP_PTR; presumably they resolve to the same
	     use_operand_p for a vuse node — confirm against tree-flow.h.  */
	  delink_imm_use (USE_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, vuse);
	}
      else
	{
	  /* This is a new operand.  */
	  add_vuse_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  Each node must be
     delinked from the immediate-use chain before recycling.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = free_vuses;
      free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  /* Verify the emitted list has exactly as many nodes as the build
     vector had entries.  */
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
783169689Skan
784169689Skan/* Return a new VUSE operand vector, comparing to OLD_OPS_P.  */
785169689Skan
786169689Skanstatic void
787169689Skanfinalize_ssa_vuses (tree stmt)
788169689Skan{
789169689Skan  unsigned num, num_v_may_defs;
790169689Skan  unsigned vuse_index;
791169689Skan
792169689Skan  /* Remove superfluous VUSE operands.  If the statement already has a
793169689Skan     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
794169689Skan     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
795169689Skan     instance, suppose that variable 'a' is aliased:
796169689Skan
797169689Skan	      # VUSE <a_2>
798169689Skan	      # a_3 = V_MAY_DEF <a_2>
799169689Skan	      a = a + 1;
800169689Skan
801169689Skan     The VUSE <a_2> is superfluous because it is implied by the
802169689Skan     V_MAY_DEF operation.  */
803169689Skan  num = VEC_length (tree, build_vuses);
804169689Skan  num_v_may_defs = VEC_length (tree, build_v_may_defs);
805169689Skan
806169689Skan  if (num > 0 && num_v_may_defs > 0)
807169689Skan    {
808169689Skan      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
809169689Skan        {
810169689Skan	  tree vuse;
811169689Skan	  vuse = VEC_index (tree, build_vuses, vuse_index);
812169689Skan	  if (TREE_CODE (vuse) != SSA_NAME)
813169689Skan	    {
814169689Skan	      var_ann_t ann = var_ann (vuse);
815169689Skan	      ann->in_vuse_list = 0;
816169689Skan	      if (ann->in_v_may_def_list)
817169689Skan	        {
818169689Skan		  VEC_ordered_remove (tree, build_vuses, vuse_index);
819169689Skan		  continue;
820169689Skan		}
821169689Skan	    }
822169689Skan	  vuse_index++;
823169689Skan	}
824169689Skan    }
825169689Skan  else
826169689Skan    {
827169689Skan      /* Clear out the in_list bits.  */
828169689Skan      for (vuse_index = 0;
829169689Skan	  vuse_index < VEC_length (tree, build_vuses);
830169689Skan	  vuse_index++)
831169689Skan	{
832169689Skan	  tree t = VEC_index (tree, build_vuses, vuse_index);
833169689Skan	  if (TREE_CODE (t) != SSA_NAME)
834169689Skan	    {
835169689Skan	      var_ann_t ann = var_ann (t);
836169689Skan	      ann->in_vuse_list = 0;
837169689Skan	    }
838169689Skan	}
839169689Skan    }
840169689Skan
841169689Skan  finalize_ssa_vuse_ops (stmt);
842169689Skan
843169689Skan  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
844169689Skan  cleanup_v_may_defs ();
845169689Skan
846169689Skan  /* Free the VUSEs build vector.  */
847169689Skan  VEC_truncate (tree, build_vuses, 0);
848169689Skan
849169689Skan}
850169689Skan
851169689Skan/* Takes elements from build_v_must_defs and turns them into mustdef operands of
852169689Skan   STMT.  */
853169689Skan
854169689Skanstatic inline void
855169689Skanfinalize_ssa_v_must_def_ops (tree stmt)
856169689Skan{
857169689Skan  unsigned new_i;
858169689Skan  struct mustdef_optype_d new_list;
859169689Skan  mustdef_optype_p old_ops, ptr, last;
860169689Skan  tree act;
861169689Skan  unsigned old_base, new_base;
862169689Skan
863169689Skan  new_list.next = NULL;
864169689Skan  last = &new_list;
865169689Skan
866169689Skan  old_ops = MUSTDEF_OPS (stmt);
867169689Skan
868169689Skan  new_i = 0;
869169689Skan  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
870169689Skan    {
871169689Skan      act = VEC_index (tree, build_v_must_defs, new_i);
872169689Skan      new_base = get_name_decl (act);
873169689Skan      old_base = get_name_decl (MUSTDEF_KILL (old_ops));
874169689Skan
875169689Skan      if (old_base == new_base)
876169689Skan        {
877169689Skan	  /* If variables are the same, reuse this node.  */
878169689Skan	  MOVE_HEAD_AFTER (old_ops, last);
879169689Skan	  set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
880169689Skan	  new_i++;
881169689Skan	}
882169689Skan      else if (old_base < new_base)
883169689Skan	{
884169689Skan	  /* If old is less than new, old goes to the free list.  */
885169689Skan	  delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
886169689Skan	  MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
887169689Skan	}
888169689Skan      else
889169689Skan	{
890169689Skan	  /* This is a new operand.  */
891169689Skan	  add_mustdef_op (stmt, act, &last);
892169689Skan	  new_i++;
893169689Skan	}
894169689Skan    }
895169689Skan
896169689Skan  /* If there is anything remaining in the build_v_must_defs list, simply emit it.  */
897169689Skan  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
898169689Skan    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
899169689Skan
900169689Skan  last->next = NULL;
901169689Skan
902169689Skan  /* If there is anything in the old list, free it.  */
903169689Skan  if (old_ops)
904169689Skan    {
905169689Skan      for (ptr = old_ops; ptr; ptr = ptr->next)
906169689Skan	delink_imm_use (MUSTDEF_KILL_PTR (ptr));
907169689Skan      old_ops->next = free_mustdefs;
908169689Skan      free_mustdefs = old_ops;
909169689Skan    }
910169689Skan
911169689Skan  /* Now set the stmt's operands.  */
912169689Skan  MUSTDEF_OPS (stmt) = new_list.next;
913169689Skan
914169689Skan#ifdef ENABLE_CHECKING
915169689Skan  {
916169689Skan    unsigned x = 0;
917169689Skan    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
918169689Skan      x++;
919169689Skan
920169689Skan    gcc_assert (x == VEC_length (tree, build_v_must_defs));
921169689Skan  }
922169689Skan#endif
923169689Skan}
924169689Skan
925169689Skanstatic void
926169689Skanfinalize_ssa_v_must_defs (tree stmt)
927169689Skan{
928169689Skan  /* In the presence of subvars, there may be more than one V_MUST_DEF
929169689Skan     per statement (one for each subvar).  It is a bit expensive to
930169689Skan     verify that all must-defs in a statement belong to subvars if
931169689Skan     there is more than one must-def, so we don't do it.  Suffice to
932169689Skan     say, if you reach here without having subvars, and have num >1,
933169689Skan     you have hit a bug.  */
934169689Skan  finalize_ssa_v_must_def_ops (stmt);
935169689Skan  VEC_truncate (tree, build_v_must_defs, 0);
936169689Skan}
937169689Skan
938169689Skan
939169689Skan/* Finalize all the build vectors, fill the new ones into INFO.  */
940169689Skan
941169689Skanstatic inline void
942169689Skanfinalize_ssa_stmt_operands (tree stmt)
943169689Skan{
944169689Skan  finalize_ssa_defs (stmt);
945169689Skan  finalize_ssa_uses (stmt);
946169689Skan  finalize_ssa_v_must_defs (stmt);
947169689Skan  finalize_ssa_v_may_defs (stmt);
948169689Skan  finalize_ssa_vuses (stmt);
949169689Skan}
950169689Skan
951169689Skan
952169689Skan/* Start the process of building up operands vectors in INFO.  */
953169689Skan
954169689Skanstatic inline void
955169689Skanstart_ssa_stmt_operands (void)
956169689Skan{
957169689Skan  gcc_assert (VEC_length (tree, build_defs) == 0);
958169689Skan  gcc_assert (VEC_length (tree, build_uses) == 0);
959169689Skan  gcc_assert (VEC_length (tree, build_vuses) == 0);
960169689Skan  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
961169689Skan  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
962169689Skan}
963169689Skan
964169689Skan
965169689Skan/* Add DEF_P to the list of pointers to operands.  */
966169689Skan
967169689Skanstatic inline void
968169689Skanappend_def (tree *def_p)
969169689Skan{
970169689Skan  VEC_safe_push (tree, heap, build_defs, (tree)def_p);
971169689Skan}
972169689Skan
973169689Skan
974169689Skan/* Add USE_P to the list of pointers to operands.  */
975169689Skan
976169689Skanstatic inline void
977169689Skanappend_use (tree *use_p)
978169689Skan{
979169689Skan  VEC_safe_push (tree, heap, build_uses, (tree)use_p);
980169689Skan}
981169689Skan
982169689Skan
983169689Skan/* Add a new virtual may def for variable VAR to the build array.  */
984169689Skan
985169689Skanstatic inline void
986169689Skanappend_v_may_def (tree var)
987169689Skan{
988169689Skan  if (TREE_CODE (var) != SSA_NAME)
989169689Skan    {
990169689Skan      var_ann_t ann = get_var_ann (var);
991169689Skan
992169689Skan      /* Don't allow duplicate entries.  */
993169689Skan      if (ann->in_v_may_def_list)
994169689Skan	return;
995169689Skan      ann->in_v_may_def_list = 1;
996169689Skan    }
997169689Skan
998169689Skan  VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
999169689Skan}
1000169689Skan
1001169689Skan
1002169689Skan/* Add VAR to the list of virtual uses.  */
1003169689Skan
1004169689Skanstatic inline void
1005169689Skanappend_vuse (tree var)
1006169689Skan{
1007169689Skan  /* Don't allow duplicate entries.  */
1008169689Skan  if (TREE_CODE (var) != SSA_NAME)
1009169689Skan    {
1010169689Skan      var_ann_t ann = get_var_ann (var);
1011169689Skan
1012169689Skan      if (ann->in_vuse_list || ann->in_v_may_def_list)
1013169689Skan        return;
1014169689Skan      ann->in_vuse_list = 1;
1015169689Skan    }
1016169689Skan
1017169689Skan  VEC_safe_push (tree, heap, build_vuses, (tree)var);
1018169689Skan}
1019169689Skan
1020169689Skan
1021169689Skan/* Add VAR to the list of virtual must definitions for INFO.  */
1022169689Skan
1023169689Skanstatic inline void
1024169689Skanappend_v_must_def (tree var)
1025169689Skan{
1026169689Skan  unsigned i;
1027169689Skan
1028169689Skan  /* Don't allow duplicate entries.  */
1029169689Skan  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1030169689Skan    if (var == VEC_index (tree, build_v_must_defs, i))
1031169689Skan      return;
1032169689Skan
1033169689Skan  VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1034169689Skan}
1035169689Skan
1036169689Skan
1037169689Skan/* REF is a tree that contains the entire pointer dereference
1038169689Skan   expression, if available, or NULL otherwise.  ALIAS is the variable
1039169689Skan   we are asking if REF can access.  OFFSET and SIZE come from the
1040169689Skan   memory access expression that generated this virtual operand.  */
1041169689Skan
1042169689Skanstatic bool
1043169689Skanaccess_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1044169689Skan			   HOST_WIDE_INT size)
1045169689Skan{
1046169689Skan  bool offsetgtz = offset > 0;
1047169689Skan  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1048169689Skan  tree base = ref ? get_base_address (ref) : NULL;
1049169689Skan
1050169689Skan  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1051169689Skan     using a call-clobbered memory tag.  By definition, call-clobbered
1052169689Skan     memory tags can always touch .GLOBAL_VAR.  */
1053169689Skan  if (alias == global_var)
1054169689Skan    return true;
1055169689Skan
1056169689Skan  /* We cannot prune nonlocal aliases because they are not type
1057169689Skan     specific.  */
1058169689Skan  if (alias == nonlocal_all)
1059169689Skan    return true;
1060169689Skan
1061169689Skan  /* If ALIAS is an SFT, it can't be touched if the offset
1062169689Skan     and size of the access is not overlapping with the SFT offset and
1063169689Skan     size.  This is only true if we are accessing through a pointer
1064169689Skan     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
1065169689Skan     be accessing through a pointer to some substruct of the
1066169689Skan     structure, and if we try to prune there, we will have the wrong
1067169689Skan     offset, and get the wrong answer.
1068169689Skan     i.e., we can't prune without more work if we have something like
1069169689Skan
1070169689Skan     struct gcc_target
1071169689Skan     {
1072169689Skan       struct asm_out
1073169689Skan       {
1074169689Skan         const char *byte_op;
1075169689Skan	 struct asm_int_op
1076169689Skan	 {
1077169689Skan	   const char *hi;
1078169689Skan	 } aligned_op;
1079169689Skan       } asm_out;
1080169689Skan     } targetm;
1081169689Skan
1082169689Skan     foo = &targetm.asm_out.aligned_op;
1083169689Skan     return foo->hi;
1084169689Skan
1085169689Skan     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1086169689Skan     terms of SFT_PARENT_VAR, that is where it is.
1087169689Skan     However, the access through the foo pointer will be at offset 0.  */
1088169689Skan  if (size != -1
1089169689Skan      && TREE_CODE (alias) == STRUCT_FIELD_TAG
1090169689Skan      && base
1091169689Skan      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1092169689Skan      && !overlap_subvar (offset, size, alias, NULL))
1093169689Skan    {
1094169689Skan#ifdef ACCESS_DEBUGGING
1095169689Skan      fprintf (stderr, "Access to ");
1096169689Skan      print_generic_expr (stderr, ref, 0);
1097169689Skan      fprintf (stderr, " may not touch ");
1098169689Skan      print_generic_expr (stderr, alias, 0);
1099169689Skan      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1100169689Skan#endif
1101169689Skan      return false;
1102169689Skan    }
1103169689Skan
1104169689Skan  /* Without strict aliasing, it is impossible for a component access
1105169689Skan     through a pointer to touch a random variable, unless that
1106169689Skan     variable *is* a structure or a pointer.
1107169689Skan
1108169689Skan     That is, given p->c, and some random global variable b,
1109169689Skan     there is no legal way that p->c could be an access to b.
1110169689Skan
1111169689Skan     Without strict aliasing on, we consider it legal to do something
1112169689Skan     like:
1113169689Skan
1114169689Skan     struct foos { int l; };
1115169689Skan     int foo;
1116169689Skan     static struct foos *getfoo(void);
1117169689Skan     int main (void)
1118169689Skan     {
1119169689Skan       struct foos *f = getfoo();
1120169689Skan       f->l = 1;
1121169689Skan       foo = 2;
1122169689Skan       if (f->l == 1)
1123169689Skan         abort();
1124169689Skan       exit(0);
1125169689Skan     }
1126169689Skan     static struct foos *getfoo(void)
1127169689Skan     { return (struct foos *)&foo; }
1128169689Skan
1129169689Skan     (taken from 20000623-1.c)
1130169689Skan
1131169689Skan     The docs also say/imply that access through union pointers
1132169689Skan     is legal (but *not* if you take the address of the union member,
1133169689Skan     i.e. the inverse), such that you can do
1134169689Skan
1135169689Skan     typedef union {
1136169689Skan       int d;
1137169689Skan     } U;
1138169689Skan
1139169689Skan     int rv;
1140169689Skan     void breakme()
1141169689Skan     {
1142169689Skan       U *rv0;
1143169689Skan       U *pretmp = (U*)&rv;
1144169689Skan       rv0 = pretmp;
1145169689Skan       rv0->d = 42;
1146169689Skan     }
1147169689Skan     To implement this, we just punt on accesses through union
1148169689Skan     pointers entirely.
1149169689Skan  */
1150169689Skan  else if (ref
1151169689Skan	   && flag_strict_aliasing
1152169689Skan	   && TREE_CODE (ref) != INDIRECT_REF
1153169689Skan	   && !MTAG_P (alias)
1154169689Skan	   && (TREE_CODE (base) != INDIRECT_REF
1155169689Skan	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1156169689Skan	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1157169689Skan	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1158169689Skan	   && !POINTER_TYPE_P (TREE_TYPE (alias))
1159169689Skan	   /* When the struct has may_alias attached to it, we need not to
1160169689Skan	      return true.  */
1161169689Skan	   && get_alias_set (base))
1162169689Skan    {
1163169689Skan#ifdef ACCESS_DEBUGGING
1164169689Skan      fprintf (stderr, "Access to ");
1165169689Skan      print_generic_expr (stderr, ref, 0);
1166169689Skan      fprintf (stderr, " may not touch ");
1167169689Skan      print_generic_expr (stderr, alias, 0);
1168169689Skan      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1169169689Skan#endif
1170169689Skan      return false;
1171169689Skan    }
1172169689Skan
1173169689Skan  /* If the offset of the access is greater than the size of one of
1174169689Skan     the possible aliases, it can't be touching that alias, because it
1175169689Skan     would be past the end of the structure.  */
1176169689Skan  else if (ref
1177169689Skan	   && flag_strict_aliasing
1178169689Skan	   && TREE_CODE (ref) != INDIRECT_REF
1179169689Skan	   && !MTAG_P (alias)
1180169689Skan	   && !POINTER_TYPE_P (TREE_TYPE (alias))
1181169689Skan	   && offsetgtz
1182169689Skan	   && DECL_SIZE (alias)
1183169689Skan	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1184169689Skan	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1185169689Skan    {
1186169689Skan#ifdef ACCESS_DEBUGGING
1187169689Skan      fprintf (stderr, "Access to ");
1188169689Skan      print_generic_expr (stderr, ref, 0);
1189169689Skan      fprintf (stderr, " may not touch ");
1190169689Skan      print_generic_expr (stderr, alias, 0);
1191169689Skan      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1192169689Skan#endif
1193169689Skan      return false;
1194169689Skan    }
1195169689Skan
1196169689Skan  return true;
1197169689Skan}
1198169689Skan
1199169689Skan
1200169689Skan/* Add VAR to the virtual operands array.  FLAGS is as in
1201169689Skan   get_expr_operands.  FULL_REF is a tree that contains the entire
1202169689Skan   pointer dereference expression, if available, or NULL otherwise.
1203169689Skan   OFFSET and SIZE come from the memory access expression that
1204169689Skan   generated this virtual operand.  FOR_CLOBBER is true is this is
1205169689Skan   adding a virtual operand for a call clobber.  */
1206169689Skan
1207169689Skanstatic void
1208169689Skanadd_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1209169689Skan		     tree full_ref, HOST_WIDE_INT offset,
1210169689Skan		     HOST_WIDE_INT size, bool for_clobber)
1211169689Skan{
1212169689Skan  VEC(tree,gc) *aliases;
1213169689Skan  tree sym;
1214169689Skan  var_ann_t v_ann;
1215169689Skan
1216169689Skan  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1217169689Skan  v_ann = var_ann (sym);
1218169689Skan
1219169689Skan  /* Mark statements with volatile operands.  Optimizers should back
1220169689Skan     off from statements having volatile operands.  */
1221169689Skan  if (TREE_THIS_VOLATILE (sym) && s_ann)
1222169689Skan    s_ann->has_volatile_ops = true;
1223169689Skan
1224169689Skan  /* If the variable cannot be modified and this is a V_MAY_DEF change
1225169689Skan     it into a VUSE.  This happens when read-only variables are marked
1226169689Skan     call-clobbered and/or aliased to writable variables.  So we only
1227169689Skan     check that this only happens on non-specific stores.
1228169689Skan
1229169689Skan     Note that if this is a specific store, i.e. associated with a
1230169689Skan     modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1231169689Skan     into validation problems.
1232169689Skan
1233169689Skan     This can happen when programs cast away const, leaving us with a
1234169689Skan     store to read-only memory.  If the statement is actually executed
1235169689Skan     at runtime, then the program is ill formed.  If the statement is
1236169689Skan     not executed then all is well.  At the very least, we cannot ICE.  */
1237169689Skan  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1238169689Skan    flags &= ~(opf_is_def | opf_kill_def);
1239169689Skan
1240169689Skan  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
1241169689Skan     virtual operands, unless the caller has specifically requested
1242169689Skan     not to add virtual operands (used when adding operands inside an
1243169689Skan     ADDR_EXPR expression).  */
1244169689Skan  if (flags & opf_no_vops)
1245169689Skan    return;
1246169689Skan
1247169689Skan  aliases = v_ann->may_aliases;
1248169689Skan  if (aliases == NULL)
1249169689Skan    {
1250169689Skan      /* The variable is not aliased or it is an alias tag.  */
1251169689Skan      if (flags & opf_is_def)
1252169689Skan	{
1253169689Skan	  if (flags & opf_kill_def)
1254169689Skan	    {
1255169689Skan	      /* V_MUST_DEF for non-aliased, non-GIMPLE register
1256169689Skan		 variable definitions.  */
1257169689Skan	      gcc_assert (!MTAG_P (var)
1258169689Skan			  || TREE_CODE (var) == STRUCT_FIELD_TAG);
1259169689Skan	      append_v_must_def (var);
1260169689Skan	    }
1261169689Skan	  else
1262169689Skan	    {
1263169689Skan	      /* Add a V_MAY_DEF for call-clobbered variables and
1264169689Skan		 memory tags.  */
1265169689Skan	      append_v_may_def (var);
1266169689Skan	    }
1267169689Skan	}
1268169689Skan      else
1269169689Skan	append_vuse (var);
1270169689Skan    }
1271169689Skan  else
1272169689Skan    {
1273169689Skan      unsigned i;
1274169689Skan      tree al;
1275169689Skan
1276169689Skan      /* The variable is aliased.  Add its aliases to the virtual
1277169689Skan	 operands.  */
1278169689Skan      gcc_assert (VEC_length (tree, aliases) != 0);
1279169689Skan
1280169689Skan      if (flags & opf_is_def)
1281169689Skan	{
1282169689Skan
1283169689Skan	  bool none_added = true;
1284169689Skan
1285169689Skan	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1286169689Skan	    {
1287169689Skan	      if (!access_can_touch_variable (full_ref, al, offset, size))
1288169689Skan		continue;
1289169689Skan
1290169689Skan	      none_added = false;
1291169689Skan	      append_v_may_def (al);
1292169689Skan	    }
1293169689Skan
1294169689Skan	  /* If the variable is also an alias tag, add a virtual
1295169689Skan	     operand for it, otherwise we will miss representing
1296169689Skan	     references to the members of the variable's alias set.
1297169689Skan	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1298169689Skan
1299169689Skan	     It is also necessary to add bare defs on clobbers for
1300169689Skan	     SMT's, so that bare SMT uses caused by pruning all the
1301169689Skan	     aliases will link up properly with calls.   In order to
1302169689Skan	     keep the number of these bare defs we add down to the
1303169689Skan	     minimum necessary, we keep track of which SMT's were used
1304169689Skan	     alone in statement vdefs or VUSEs.  */
1305169689Skan	  if (v_ann->is_aliased
1306169689Skan	      || none_added
1307169689Skan	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1308169689Skan		  && for_clobber
1309169689Skan		  && SMT_USED_ALONE (var)))
1310169689Skan	    {
1311169689Skan	      /* Every bare SMT def we add should have SMT_USED_ALONE
1312169689Skan		 set on it, or else we will get the wrong answer on
1313169689Skan		 clobbers.  Sadly, this assertion trips on code that
1314169689Skan		 violates strict aliasing rules, because they *do* get
1315169689Skan		 the clobbers wrong, since it is illegal code.  As a
1316169689Skan		 result, we currently only enable it for aliasing
1317169689Skan		 debugging.  Someone might wish to turn this code into
1318169689Skan		 a nice strict-aliasing warning, since we *know* it
1319169689Skan		 will get the wrong answer...  */
1320169689Skan#ifdef ACCESS_DEBUGGING
1321169689Skan	      if (none_added
1322169689Skan		  && !updating_used_alone && aliases_computed_p
1323169689Skan		  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1324169689Skan		gcc_assert (SMT_USED_ALONE (var));
1325169689Skan#endif
1326169689Skan	      append_v_may_def (var);
1327169689Skan	    }
1328169689Skan	}
1329169689Skan      else
1330169689Skan	{
1331169689Skan	  bool none_added = true;
1332169689Skan	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1333169689Skan	    {
1334169689Skan	      if (!access_can_touch_variable (full_ref, al, offset, size))
1335169689Skan		continue;
1336169689Skan	      none_added = false;
1337169689Skan	      append_vuse (al);
1338169689Skan	    }
1339169689Skan
1340169689Skan	  /* Similarly, append a virtual uses for VAR itself, when
1341169689Skan	     it is an alias tag.  */
1342169689Skan	  if (v_ann->is_aliased || none_added)
1343169689Skan	    append_vuse (var);
1344169689Skan	}
1345169689Skan    }
1346169689Skan}
1347169689Skan
1348169689Skan
1349169689Skan/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
1350169689Skan   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
1351169689Skan   the statement's real operands, otherwise it is added to virtual
1352169689Skan   operands.  */
1353169689Skan
1354169689Skanstatic void
1355169689Skanadd_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1356169689Skan{
1357169689Skan  bool is_real_op;
1358169689Skan  tree var, sym;
1359169689Skan  var_ann_t v_ann;
1360169689Skan
1361169689Skan  var = *var_p;
1362169689Skan  gcc_assert (SSA_VAR_P (var));
1363169689Skan
1364169689Skan  is_real_op = is_gimple_reg (var);
1365169689Skan
1366169689Skan  /* If this is a real operand, the operand is either an SSA name or a
1367169689Skan     decl.  Virtual operands may only be decls.  */
1368169689Skan  gcc_assert (is_real_op || DECL_P (var));
1369169689Skan
1370169689Skan  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1371169689Skan  v_ann = var_ann (sym);
1372169689Skan
1373169689Skan  /* Mark statements with volatile operands.  Optimizers should back
1374169689Skan     off from statements having volatile operands.  */
1375169689Skan  if (TREE_THIS_VOLATILE (sym) && s_ann)
1376169689Skan    s_ann->has_volatile_ops = true;
1377169689Skan
1378169689Skan  if (is_real_op)
1379169689Skan    {
1380169689Skan      /* The variable is a GIMPLE register.  Add it to real operands.  */
1381169689Skan      if (flags & opf_is_def)
1382169689Skan	append_def (var_p);
1383169689Skan      else
1384169689Skan	append_use (var_p);
1385169689Skan    }
1386169689Skan  else
1387169689Skan    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1388169689Skan}
1389169689Skan
1390169689Skan
1391169689Skan/* A subroutine of get_expr_operands to handle INDIRECT_REF,
1392169689Skan   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1393169689Skan
1394169689Skan   STMT is the statement being processed, EXPR is the INDIRECT_REF
1395169689Skan      that got us here.
1396169689Skan
1397169689Skan   FLAGS is as in get_expr_operands.
1398169689Skan
1399169689Skan   FULL_REF contains the full pointer dereference expression, if we
1400169689Skan      have it, or NULL otherwise.
1401169689Skan
1402169689Skan   OFFSET and SIZE are the location of the access inside the
1403169689Skan      dereferenced pointer, if known.
1404169689Skan
1405169689Skan   RECURSE_ON_BASE should be set to true if we want to continue
1406169689Skan      calling get_expr_operands on the base pointer, and false if
1407169689Skan      something else will do it for us.  */
1408169689Skan
1409169689Skanstatic void
1410169689Skanget_indirect_ref_operands (tree stmt, tree expr, int flags,
1411169689Skan			   tree full_ref,
1412169689Skan			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
1413169689Skan			   bool recurse_on_base)
1414169689Skan{
1415169689Skan  tree *pptr = &TREE_OPERAND (expr, 0);
1416169689Skan  tree ptr = *pptr;
1417169689Skan  stmt_ann_t s_ann = stmt_ann (stmt);
1418169689Skan
1419169689Skan  /* Stores into INDIRECT_REF operands are never killing definitions.  */
1420169689Skan  flags &= ~opf_kill_def;
1421169689Skan
1422169689Skan  if (SSA_VAR_P (ptr))
1423169689Skan    {
1424169689Skan      struct ptr_info_def *pi = NULL;
1425169689Skan
1426169689Skan      /* If PTR has flow-sensitive points-to information, use it.  */
1427169689Skan      if (TREE_CODE (ptr) == SSA_NAME
1428169689Skan	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1429169689Skan	  && pi->name_mem_tag)
1430169689Skan	{
1431169689Skan	  /* PTR has its own memory tag.  Use it.  */
1432169689Skan	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1433169689Skan			       full_ref, offset, size, false);
1434169689Skan	}
1435169689Skan      else
1436169689Skan	{
1437169689Skan	  /* If PTR is not an SSA_NAME or it doesn't have a name
1438169689Skan	     tag, use its symbol memory tag.  */
1439169689Skan	  var_ann_t v_ann;
1440169689Skan
1441169689Skan	  /* If we are emitting debugging dumps, display a warning if
1442169689Skan	     PTR is an SSA_NAME with no flow-sensitive alias
1443169689Skan	     information.  That means that we may need to compute
1444169689Skan	     aliasing again.  */
1445169689Skan	  if (dump_file
1446169689Skan	      && TREE_CODE (ptr) == SSA_NAME
1447169689Skan	      && pi == NULL)
1448169689Skan	    {
1449169689Skan	      fprintf (dump_file,
1450169689Skan		  "NOTE: no flow-sensitive alias info for ");
1451169689Skan	      print_generic_expr (dump_file, ptr, dump_flags);
1452169689Skan	      fprintf (dump_file, " in ");
1453169689Skan	      print_generic_stmt (dump_file, stmt, dump_flags);
1454169689Skan	    }
1455169689Skan
1456169689Skan	  if (TREE_CODE (ptr) == SSA_NAME)
1457169689Skan	    ptr = SSA_NAME_VAR (ptr);
1458169689Skan	  v_ann = var_ann (ptr);
1459169689Skan
1460169689Skan	  if (v_ann->symbol_mem_tag)
1461169689Skan	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1462169689Skan				 full_ref, offset, size, false);
1463169689Skan	}
1464169689Skan    }
1465169689Skan  else if (TREE_CODE (ptr) == INTEGER_CST)
1466169689Skan    {
1467169689Skan      /* If a constant is used as a pointer, we can't generate a real
1468169689Skan	 operand for it but we mark the statement volatile to prevent
1469169689Skan	 optimizations from messing things up.  */
1470169689Skan      if (s_ann)
1471169689Skan	s_ann->has_volatile_ops = true;
1472169689Skan      return;
1473169689Skan    }
1474169689Skan  else
1475169689Skan    {
1476169689Skan      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
1477169689Skan      gcc_unreachable ();
1478169689Skan    }
1479169689Skan
1480169689Skan  /* If requested, add a USE operand for the base pointer.  */
1481169689Skan  if (recurse_on_base)
1482169689Skan    get_expr_operands (stmt, pptr, opf_none);
1483169689Skan}
1484169689Skan
1485169689Skan
1486169689Skan/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
1487169689Skan
1488169689Skanstatic void
1489169689Skanget_tmr_operands (tree stmt, tree expr, int flags)
1490169689Skan{
1491169689Skan  tree tag = TMR_TAG (expr), ref;
1492169689Skan  HOST_WIDE_INT offset, size, maxsize;
1493169689Skan  subvar_t svars, sv;
1494169689Skan  stmt_ann_t s_ann = stmt_ann (stmt);
1495169689Skan
1496169689Skan  /* First record the real operands.  */
1497169689Skan  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1498169689Skan  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1499169689Skan
1500169689Skan  /* MEM_REFs should never be killing.  */
1501169689Skan  flags &= ~opf_kill_def;
1502169689Skan
1503169689Skan  if (TMR_SYMBOL (expr))
1504169689Skan    {
1505169689Skan      stmt_ann_t ann = stmt_ann (stmt);
1506169689Skan      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1507169689Skan    }
1508169689Skan
1509169689Skan  if (!tag)
1510169689Skan    {
1511169689Skan      /* Something weird, so ensure that we will be careful.  */
1512169689Skan      stmt_ann (stmt)->has_volatile_ops = true;
1513169689Skan      return;
1514169689Skan    }
1515169689Skan
1516169689Skan  if (DECL_P (tag))
1517169689Skan    {
1518169689Skan      get_expr_operands (stmt, &tag, flags);
1519169689Skan      return;
1520169689Skan    }
1521169689Skan
1522169689Skan  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1523169689Skan  gcc_assert (ref != NULL_TREE);
1524169689Skan  svars = get_subvars_for_var (ref);
1525169689Skan  for (sv = svars; sv; sv = sv->next)
1526169689Skan    {
1527169689Skan      bool exact;
1528169689Skan      if (overlap_subvar (offset, maxsize, sv->var, &exact))
1529169689Skan	{
1530169689Skan	  int subvar_flags = flags;
1531169689Skan	  if (!exact || size != maxsize)
1532169689Skan	    subvar_flags &= ~opf_kill_def;
1533169689Skan	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
1534169689Skan	}
1535169689Skan    }
1536169689Skan}
1537169689Skan
1538169689Skan
1539169689Skan/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1540169689Skan   clobbered variables in the function.  STMT is the statement containing
   the call and CALLEE is the called FUNCTION_DECL, or NULL_TREE when the
   callee is not statically known (e.g. an indirect call), in which case
   no IPA not-read/not-written information is available.  */
1541169689Skan
1542169689Skanstatic void
1543169689Skanadd_call_clobber_ops (tree stmt, tree callee)
1544169689Skan{
1545169689Skan  unsigned u;
1546169689Skan  bitmap_iterator bi;
1547169689Skan  stmt_ann_t s_ann = stmt_ann (stmt);
1548169689Skan  bitmap not_read_b, not_written_b;
1549169689Skan
1550169689Skan  /* Functions that are not const, pure or never return may clobber
1551169689Skan     call-clobbered variables.  */
1552169689Skan  if (s_ann)
1553169689Skan    s_ann->makes_clobbering_call = true;
1554169689Skan
1555169689Skan  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
1556169689Skan     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
1557169689Skan  if (global_var)
1558169689Skan    {
1559169689Skan      add_stmt_operand (&global_var, s_ann, opf_is_def);
1560169689Skan      return;
1561169689Skan    }
1562169689Skan
1563169689Skan  /* Get info for local and module level statics.  There is a bit
1564169689Skan     set for each static if the call being processed does not read
1565169689Skan     or write that variable.  */
1566169689Skan  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1567169689Skan  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1568169689Skan  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
1569169689Skan  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1570169689Skan    {
1571169689Skan      tree var = referenced_var_lookup (u);
1572169689Skan      unsigned int escape_mask = var_ann (var)->escape_mask;
1573169689Skan      tree real_var = var;
1574169689Skan      bool not_read;
1575169689Skan      bool not_written;
1576169689Skan
1577169689Skan      /* Not read and not written are computed on regular vars, not
1578169689Skan	 subvars, so look at the parent var if this is an SFT. */
1579169689Skan      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1580169689Skan	real_var = SFT_PARENT_VAR (var);
1581169689Skan
1582169689Skan      not_read = not_read_b ? bitmap_bit_p (not_read_b,
1583169689Skan					    DECL_UID (real_var)) : false;
1584169689Skan      not_written = not_written_b ? bitmap_bit_p (not_written_b,
1585169689Skan						  DECL_UID (real_var)) : false;
1586169689Skan      gcc_assert (!unmodifiable_var_p (var));
1587169689Skan
1588169689Skan      clobber_stats.clobbered_vars++;
1589169689Skan
1590169689Skan      /* See if this variable is really clobbered by this function.  */
1591169689Skan
1592169689Skan      /* Trivial case: Things escaping only to pure/const are not
1593169689Skan	 clobbered by non-pure-const, and only read by pure/const. */
1594169689Skan      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1595169689Skan	{
1596169689Skan	  tree call = get_call_expr_in (stmt);
1597169689Skan	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1598169689Skan	    {
	      /* A pure/const callee may read VAR but never writes it,
		 so a plain use is sufficient.  */
1599169689Skan	      add_stmt_operand (&var, s_ann, opf_none);
1600169689Skan	      clobber_stats.unescapable_clobbers_avoided++;
1601169689Skan	      continue;
1602169689Skan	    }
1603169689Skan	  else
1604169689Skan	    {
1605169689Skan	      clobber_stats.unescapable_clobbers_avoided++;
1606169689Skan	      continue;
1607169689Skan	    }
1608169689Skan	}
1609169689Skan
      /* Variables the callee provably never writes need at most a use;
	 if the callee also never reads them, no operand is needed.  */
1610169689Skan      if (not_written)
1611169689Skan	{
1612169689Skan	  clobber_stats.static_write_clobbers_avoided++;
1613169689Skan	  if (!not_read)
1614169689Skan	    add_stmt_operand (&var, s_ann, opf_none);
1615169689Skan	  else
1616169689Skan	    clobber_stats.static_read_clobbers_avoided++;
1617169689Skan	}
1618169689Skan      else
1619169689Skan	add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1620169689Skan    }
1621169689Skan}
1622169689Skan
1623169689Skan
1624169689Skan/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1625169689Skan   function.  STMT is the statement containing the call and CALLEE is the
   called FUNCTION_DECL, or NULL_TREE if the callee is not statically
   known (in which case no IPA not-read information is available).  */
1626169689Skan
1627169689Skanstatic void
1628169689Skanadd_call_read_ops (tree stmt, tree callee)
1629169689Skan{
1630169689Skan  unsigned u;
1631169689Skan  bitmap_iterator bi;
1632169689Skan  stmt_ann_t s_ann = stmt_ann (stmt);
1633169689Skan  bitmap not_read_b;
1634169689Skan
1635169689Skan  /* If the function is not pure, it may reference memory.  Add
1636169689Skan     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
1637169689Skan     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
1638169689Skan  if (global_var)
1639169689Skan    {
1640169689Skan      add_stmt_operand (&global_var, s_ann, opf_none);
1641169689Skan      return;
1642169689Skan    }
1643169689Skan
1644169689Skan  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1645169689Skan
1646169689Skan  /* Add a VUSE for each call-clobbered variable.  */
1647169689Skan  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1648169689Skan    {
1649169689Skan      tree var = referenced_var (u);
1650169689Skan      tree real_var = var;
1651169689Skan      bool not_read;
1652169689Skan
1653169689Skan      clobber_stats.readonly_clobbers++;
1654169689Skan
1655169689Skan      /* Not read and not written are computed on regular vars, not
1656169689Skan	 subvars, so look at the parent var if this is an SFT. */
1657169689Skan
1658169689Skan      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1659169689Skan	real_var = SFT_PARENT_VAR (var);
1660169689Skan
1661169689Skan      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1662169689Skan	                    : false;
1663169689Skan
      /* If IPA information proves the callee never reads this static,
	 skip the VUSE entirely.  */
1664169689Skan      if (not_read)
1665169689Skan	{
1666169689Skan	  clobber_stats.static_readonly_clobbers_avoided++;
1667169689Skan	  continue;
1668169689Skan	}
1669169689Skan
1670169689Skan      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1671169689Skan    }
1672169689Skan}
1673169689Skan
1674169689Skan
1675169689Skan/* A subroutine of get_expr_operands to handle CALL_EXPR.  STMT is the
   statement containing the call and EXPR is the CALL_EXPR itself.  Adds
   virtual operands for the call's global effects (when aliasing has been
   computed) and real uses for the callee, arguments and static chain.  */
1676169689Skan
1677169689Skanstatic void
1678169689Skanget_call_expr_operands (tree stmt, tree expr)
1679169689Skan{
1680169689Skan  tree op;
1681169689Skan  int call_flags = call_expr_flags (expr);
1682169689Skan
1683169689Skan  /* If aliases have been computed already, add V_MAY_DEF or V_USE
1684169689Skan     operands for all the symbols that have been found to be
1685169689Skan     call-clobbered.
1686169689Skan
1687169689Skan     Note that if aliases have not been computed, the global effects
1688169689Skan     of calls will not be included in the SSA web. This is fine
1689169689Skan     because no optimizer should run before aliases have been
1690169689Skan     computed.  By not bothering with virtual operands for CALL_EXPRs
1691169689Skan     we avoid adding superfluous virtual operands, which can be a
1692169689Skan     significant compile time sink (See PR 15855).  */
1693169689Skan  if (aliases_computed_p
1694169689Skan      && !bitmap_empty_p (call_clobbered_vars)
1695169689Skan      && !(call_flags & ECF_NOVOPS))
1696169689Skan    {
1697169689Skan      /* A 'pure' or a 'const' function never call-clobbers anything.
1698169689Skan	 A 'noreturn' function might, but since we don't return anyway
1699169689Skan	 there is no point in recording that.  */
1700169689Skan      if (TREE_SIDE_EFFECTS (expr)
1701169689Skan	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1702169689Skan	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1703169689Skan      else if (!(call_flags & ECF_CONST))
1704169689Skan	add_call_read_ops (stmt, get_callee_fndecl (expr));
1705169689Skan    }
1706169689Skan
1707169689Skan  /* Find uses in the callee expression (operand 0 of the CALL_EXPR).  */
1708169689Skan  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1709169689Skan
  /* Scan every actual argument in the TREE_LIST of arguments.  */
1710169689Skan  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1711169689Skan    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1712169689Skan
  /* Operand 2 is the static chain, if the call has one.  */
1713169689Skan  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1714169689Skan}
1715169689Skan
1716169689Skan
1717169689Skan/* Scan operands in the ASM_EXPR statement STMT.  Outputs are scanned as
   definitions and inputs as uses; a "memory" clobber makes the statement
   a clobbering definition of every call-clobbered or addressable
   variable.  */
1718169689Skan
1719169689Skanstatic void
1720169689Skanget_asm_expr_operands (tree stmt)
1721169689Skan{
1722169689Skan  stmt_ann_t s_ann = stmt_ann (stmt);
1723169689Skan  int noutputs = list_length (ASM_OUTPUTS (stmt));
1724169689Skan  const char **oconstraints
1725169689Skan    = (const char **) alloca ((noutputs) * sizeof (const char *));
1726169689Skan  int i;
1727169689Skan  tree link;
1728169689Skan  const char *constraint;
1729169689Skan  bool allows_mem, allows_reg, is_inout;
1730169689Skan
1731169689Skan  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1732169689Skan    {
1733169689Skan      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1734169689Skan      oconstraints[i] = constraint;
1735169689Skan      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1736169689Skan	                       &allows_reg, &is_inout);
1737169689Skan
1738169689Skan      /* This should have been split in gimplify_asm_expr.  */
1739169689Skan      gcc_assert (!allows_reg || !is_inout);
1740169689Skan
1741169689Skan      /* Memory operands are addressable.  Note that STMT needs the
1742169689Skan	 address of this operand.  */
1743169689Skan      if (!allows_reg && allows_mem)
1744169689Skan	{
1745169689Skan	  tree t = get_base_address (TREE_VALUE (link));
1746169689Skan	  if (t && DECL_P (t) && s_ann)
1747169689Skan	    add_to_addressable_set (t, &s_ann->addresses_taken);
1748169689Skan	}
1749169689Skan
1750169689Skan      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1751169689Skan    }
1752169689Skan
1753169689Skan  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1754169689Skan    {
1755169689Skan      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1756169689Skan      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1757169689Skan			      oconstraints, &allows_mem, &allows_reg);
1758169689Skan
1759169689Skan      /* Memory operands are addressable.  Note that STMT needs the
1760169689Skan	 address of this operand.  */
1761169689Skan      if (!allows_reg && allows_mem)
1762169689Skan	{
1763169689Skan	  tree t = get_base_address (TREE_VALUE (link));
1764169689Skan	  if (t && DECL_P (t) && s_ann)
1765169689Skan	    add_to_addressable_set (t, &s_ann->addresses_taken);
1766169689Skan	}
1767169689Skan
      /* NOTE(review): 0 appears to stand for opf_none here — confirm and
	 consider spelling it opf_none for clarity.  */
1768169689Skan      get_expr_operands (stmt, &TREE_VALUE (link), 0);
1769169689Skan    }
1770169689Skan
1771169689Skan
1772169689Skan  /* Clobber memory for asm ("" : : : "memory");  */
1773169689Skan  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1774169689Skan    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1775169689Skan      {
1776169689Skan	unsigned i;
1777169689Skan	bitmap_iterator bi;
1778169689Skan
1779169689Skan	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1780169689Skan	   decided to group them).  */
1781169689Skan	if (global_var)
1782169689Skan	  add_stmt_operand (&global_var, s_ann, opf_is_def);
1783169689Skan	else
1784169689Skan	  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1785169689Skan	    {
1786169689Skan	      tree var = referenced_var (i);
1787169689Skan	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1788169689Skan	    }
1789169689Skan
1790169689Skan	/* Now clobber all addressables.  */
1791169689Skan	EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1792169689Skan	    {
1793169689Skan	      tree var = referenced_var (i);
1794169689Skan
1795169689Skan	      /* Subvars are explicitly represented in this list, so
1796169689Skan		 we don't need the original to be added to the clobber
1797169689Skan		 ops, but the original *will* be in this list because
1798169689Skan		 we keep the addressability of the original
1799169689Skan		 variable up-to-date so we don't screw up the rest of
1800169689Skan		 the backend.  */
1801169689Skan	      if (var_can_have_subvars (var)
1802169689Skan		  && get_subvars_for_var (var) != NULL)
1803169689Skan		continue;
1804169689Skan
1805169689Skan	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1806169689Skan	    }
1807169689Skan
	/* Only one "memory" clobber needs to be processed.  */
1808169689Skan	break;
1809169689Skan      }
1810169689Skan}
1811169689Skan
1812169689Skan
1813169689Skan/* Scan operands for the assignment expression EXPR in statement STMT.
   STMT is the enclosing statement (often EXPR itself) and EXPR is the
   MODIFY_EXPR whose RHS is scanned for uses and whose LHS is scanned
   for definitions.  */
1814169689Skan
1815169689Skanstatic void
1816169689Skanget_modify_expr_operands (tree stmt, tree expr)
1817169689Skan{
1818169689Skan  /* First get operands from the RHS.  */
1819169689Skan  get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1820169689Skan
1821169689Skan  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1822169689Skan     registers.  If the LHS is a store to memory, we will either need
1823169689Skan     a preserving definition (V_MAY_DEF) or a killing definition
1824169689Skan     (V_MUST_DEF).
1825169689Skan
1826169689Skan     Preserving definitions are those that modify a part of an
1827169689Skan     aggregate object for which no subvars have been computed (or the
1828169689Skan     reference does not correspond exactly to one of them). Stores
1829169689Skan     through a pointer are also represented with V_MAY_DEF operators.
1830169689Skan
1831169689Skan     The determination of whether to use a preserving or a killing
1832169689Skan     definition is done while scanning the LHS of the assignment.  By
1833169689Skan     default, assume that we will emit a V_MUST_DEF.  */
1834169689Skan  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
1835169689Skan}
1836169689Skan
1837169689Skan
1838169689Skan/* Recursively scan the expression pointed to by EXPR_P in statement
1839169689Skan   STMT.  FLAGS is a bitmask of the OPF_* constants modifying how to
1840169689Skan   interpret the operands found.  */
1841169689Skan
1842169689Skanstatic void
1843169689Skanget_expr_operands (tree stmt, tree *expr_p, int flags)
1844169689Skan{
1845169689Skan  enum tree_code code;
1846169689Skan  enum tree_code_class class;
1847169689Skan  tree expr = *expr_p;
1848169689Skan  stmt_ann_t s_ann = stmt_ann (stmt);
1849169689Skan
1850169689Skan  if (expr == NULL)
1851169689Skan    return;
1852169689Skan
1853169689Skan  code = TREE_CODE (expr);
1854169689Skan  class = TREE_CODE_CLASS (code);
1855169689Skan
1856169689Skan  switch (code)
1857169689Skan    {
1858169689Skan    case ADDR_EXPR:
1859169689Skan      /* Taking the address of a variable does not represent a
1860169689Skan	 reference to it, but the fact that the statement takes its
1861169689Skan	 address will be of interest to some passes (e.g. alias
1862169689Skan	 resolution).  */
      /* NOTE(review): s_ann is dereferenced unguarded here, unlike the
	 `if (s_ann)` checks used elsewhere in this file — confirm that
	 statements reaching this point always carry an annotation.  */
1863169689Skan      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1864169689Skan
1865169689Skan      /* If the address is invariant, there may be no interesting
1866169689Skan	 variable references inside.  */
1867169689Skan      if (is_gimple_min_invariant (expr))
1868169689Skan	return;
1869169689Skan
1870169689Skan      /* Otherwise, there may be variables referenced inside but there
1871169689Skan	 should be no VUSEs created, since the referenced objects are
1872169689Skan	 not really accessed.  The only operands that we should find
1873169689Skan	 here are ARRAY_REF indices which will always be real operands
1874169689Skan	 (GIMPLE does not allow non-registers as array indices).  */
1875169689Skan      flags |= opf_no_vops;
1876169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1877169689Skan      return;
1878169689Skan
    /* SSA names and memory tags are operands by themselves; add them
       directly with the caller-provided flags.  */
1879169689Skan    case SSA_NAME:
1880169689Skan    case STRUCT_FIELD_TAG:
1881169689Skan    case SYMBOL_MEMORY_TAG:
1882169689Skan    case NAME_MEMORY_TAG:
1883169689Skan     add_stmt_operand (expr_p, s_ann, flags);
1884169689Skan     return;
1885169689Skan
1886169689Skan    case VAR_DECL:
1887169689Skan    case PARM_DECL:
1888169689Skan    case RESULT_DECL:
1889169689Skan      {
1890169689Skan	subvar_t svars;
1891169689Skan
1892169689Skan	/* Add the subvars for a variable, if it has subvars, to DEFS
1893169689Skan	   or USES.  Otherwise, add the variable itself.  Whether it
1894169689Skan	   goes to USES or DEFS depends on the operand flags.  */
1895169689Skan	if (var_can_have_subvars (expr)
1896169689Skan	    && (svars = get_subvars_for_var (expr)))
1897169689Skan	  {
1898169689Skan	    subvar_t sv;
1899169689Skan	    for (sv = svars; sv; sv = sv->next)
1900169689Skan	      add_stmt_operand (&sv->var, s_ann, flags);
1901169689Skan	  }
1902169689Skan	else
1903169689Skan	  add_stmt_operand (expr_p, s_ann, flags);
1904169689Skan
1905169689Skan	return;
1906169689Skan      }
1907169689Skan
1908169689Skan    case MISALIGNED_INDIRECT_REF:
      /* Operand 1 of a MISALIGNED_INDIRECT_REF holds extra information;
	 scan it before falling through to the common indirect case.  */
1909169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1910169689Skan      /* fall through */
1911169689Skan
1912169689Skan    case ALIGN_INDIRECT_REF:
1913169689Skan    case INDIRECT_REF:
1914169689Skan      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1915169689Skan      return;
1916169689Skan
1917169689Skan    case TARGET_MEM_REF:
1918169689Skan      get_tmr_operands (stmt, expr, flags);
1919169689Skan      return;
1920169689Skan
1921169689Skan    case ARRAY_REF:
1922169689Skan    case ARRAY_RANGE_REF:
1923169689Skan    case COMPONENT_REF:
1924169689Skan    case REALPART_EXPR:
1925169689Skan    case IMAGPART_EXPR:
1926169689Skan      {
1927169689Skan	tree ref;
1928169689Skan	HOST_WIDE_INT offset, size, maxsize;
1929169689Skan	bool none = true;
1930169689Skan
1931169689Skan	/* This component reference becomes an access to all of the
1932169689Skan	   subvariables it can touch, if we can determine that, but
1933169689Skan	   *NOT* the real one.  If we can't determine which fields we
1934169689Skan	   could touch, the recursion will eventually get to a
1935169689Skan	   variable and add *all* of its subvars, or whatever is the
1936169689Skan	   minimum correct subset.  */
1937169689Skan	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1938169689Skan	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1939169689Skan	  {
1940169689Skan	    subvar_t sv;
1941169689Skan	    subvar_t svars = get_subvars_for_var (ref);
1942169689Skan
1943169689Skan	    for (sv = svars; sv; sv = sv->next)
1944169689Skan	      {
1945169689Skan		bool exact;
1946169689Skan
1947169689Skan		if (overlap_subvar (offset, maxsize, sv->var, &exact))
1948169689Skan		  {
1949169689Skan	            int subvar_flags = flags;
1950169689Skan		    none = false;
		    /* An inexact or variable-sized overlap cannot be a
		       killing definition.  */
1951169689Skan		    if (!exact || size != maxsize)
1952169689Skan		      subvar_flags &= ~opf_kill_def;
1953169689Skan		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
1954169689Skan		  }
1955169689Skan	      }
1956169689Skan
1957169689Skan	    if (!none)
1958169689Skan	      flags |= opf_no_vops;
1959169689Skan	  }
1960169689Skan	else if (TREE_CODE (ref) == INDIRECT_REF)
1961169689Skan	  {
1962169689Skan	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1963169689Skan		                       maxsize, false);
1964169689Skan	    flags |= opf_no_vops;
1965169689Skan	  }
1966169689Skan
1967169689Skan	/* Even if we found subvars above we need to ensure to see
1968169689Skan	   immediate uses for d in s.a[d].  In case s.a has subvars
1969169689Skan	   we would miss those uses otherwise.  */
1970169689Skan	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1971169689Skan			   flags & ~opf_kill_def);
1972169689Skan
1973169689Skan	if (code == COMPONENT_REF)
1974169689Skan	  {
1975169689Skan	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1976169689Skan	      s_ann->has_volatile_ops = true;
1977169689Skan	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1978169689Skan	  }
1979169689Skan	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
1980169689Skan	  {
1981169689Skan            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1982169689Skan            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1983169689Skan            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1984169689Skan	  }
1985169689Skan
1986169689Skan	return;
1987169689Skan      }
1988169689Skan
1989169689Skan    case WITH_SIZE_EXPR:
1990169689Skan      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1991169689Skan	 and an rvalue reference to its second argument.  */
1992169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1993169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1994169689Skan      return;
1995169689Skan
1996169689Skan    case CALL_EXPR:
1997169689Skan      get_call_expr_operands (stmt, expr);
1998169689Skan      return;
1999169689Skan
2000169689Skan    case COND_EXPR:
2001169689Skan    case VEC_COND_EXPR:
2002169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2003169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2004169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2005169689Skan      return;
2006169689Skan
2007169689Skan    case MODIFY_EXPR:
2008169689Skan      get_modify_expr_operands (stmt, expr);
2009169689Skan      return;
2010169689Skan
2011169689Skan    case CONSTRUCTOR:
2012169689Skan      {
2013169689Skan	/* General aggregate CONSTRUCTORs have been decomposed, but they
2014169689Skan	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
2015169689Skan	constructor_elt *ce;
2016169689Skan	unsigned HOST_WIDE_INT idx;
2017169689Skan
2018169689Skan	for (idx = 0;
2019169689Skan	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2020169689Skan	     idx++)
2021169689Skan	  get_expr_operands (stmt, &ce->value, opf_none);
2022169689Skan
2023169689Skan	return;
2024169689Skan      }
2025169689Skan
2026169689Skan    case BIT_FIELD_REF:
2027169689Skan      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
2028169689Skan      flags &= ~opf_kill_def;
2029169689Skan
2030169689Skan      /* Fallthru  */
2031169689Skan
2032169689Skan    case TRUTH_NOT_EXPR:
2033169689Skan    case VIEW_CONVERT_EXPR:
2034169689Skan    do_unary:
2035169689Skan      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2036169689Skan      return;
2037169689Skan
2038169689Skan    case TRUTH_AND_EXPR:
2039169689Skan    case TRUTH_OR_EXPR:
2040169689Skan    case TRUTH_XOR_EXPR:
2041169689Skan    case COMPOUND_EXPR:
2042169689Skan    case OBJ_TYPE_REF:
2043169689Skan    case ASSERT_EXPR:
2044169689Skan    do_binary:
2045169689Skan      {
2046169689Skan	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2047169689Skan	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2048169689Skan	return;
2049169689Skan      }
2050169689Skan
2051169689Skan    case DOT_PROD_EXPR:
2052169689Skan    case REALIGN_LOAD_EXPR:
2053169689Skan      {
2054169689Skan	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2055169689Skan        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2056169689Skan        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2057169689Skan        return;
2058169689Skan      }
2059169689Skan
2060169689Skan    case BLOCK:
2061169689Skan    case FUNCTION_DECL:
2062169689Skan    case EXC_PTR_EXPR:
2063169689Skan    case FILTER_EXPR:
2064169689Skan    case LABEL_DECL:
2065169689Skan    case CONST_DECL:
2066169689Skan    case OMP_PARALLEL:
2067169689Skan    case OMP_SECTIONS:
2068169689Skan    case OMP_FOR:
2069169689Skan    case OMP_SINGLE:
2070169689Skan    case OMP_MASTER:
2071169689Skan    case OMP_ORDERED:
2072169689Skan    case OMP_CRITICAL:
2073169689Skan    case OMP_RETURN:
2074169689Skan    case OMP_CONTINUE:
2075169689Skan      /* Expressions that make no memory references.  */
2076169689Skan      return;
2077169689Skan
2078169689Skan    default:
      /* Codes without an explicit case are dispatched by tree code
	 class: unary and binary/comparison operators are handled
	 generically; constants and types contribute no operands.  */
2079169689Skan      if (class == tcc_unary)
2080169689Skan	goto do_unary;
2081169689Skan      if (class == tcc_binary || class == tcc_comparison)
2082169689Skan	goto do_binary;
2083169689Skan      if (class == tcc_constant || class == tcc_type)
2084169689Skan	return;
2085169689Skan    }
2086169689Skan
2087169689Skan  /* If we get here, something has gone wrong.  */
2088169689Skan#ifdef ENABLE_CHECKING
2089169689Skan  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2090169689Skan  debug_tree (expr);
2091169689Skan  fputs ("\n", stderr);
2092169689Skan#endif
2093169689Skan  gcc_unreachable ();
2094169689Skan}
2095169689Skan
2096169689Skan
2097169689Skan/* Parse STMT looking for operands.  When finished, the various
2098169689Skan   build_* operand vectors will have potential operands in them.
   Dispatches on the statement's tree code to the appropriate operand
   scanner; callers must have called start_ssa_stmt_operands first.  */
2099169689Skan
2100169689Skanstatic void
2101169689Skanparse_ssa_operands (tree stmt)
2102169689Skan{
2103169689Skan  enum tree_code code;
2104169689Skan
2105169689Skan  code = TREE_CODE (stmt);
2106169689Skan  switch (code)
2107169689Skan    {
2108169689Skan    case MODIFY_EXPR:
2109169689Skan      get_modify_expr_operands (stmt, stmt);
2110169689Skan      break;
2111169689Skan
2112169689Skan    case COND_EXPR:
2113169689Skan      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2114169689Skan      break;
2115169689Skan
2116169689Skan    case SWITCH_EXPR:
2117169689Skan      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2118169689Skan      break;
2119169689Skan
2120169689Skan    case ASM_EXPR:
2121169689Skan      get_asm_expr_operands (stmt);
2122169689Skan      break;
2123169689Skan
2124169689Skan    case RETURN_EXPR:
2125169689Skan      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2126169689Skan      break;
2127169689Skan
2128169689Skan    case GOTO_EXPR:
2129169689Skan      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2130169689Skan      break;
2131169689Skan
2132169689Skan    case LABEL_EXPR:
2133169689Skan      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2134169689Skan      break;
2135169689Skan
2136169689Skan    case BIND_EXPR:
2137169689Skan    case CASE_LABEL_EXPR:
2138169689Skan    case TRY_CATCH_EXPR:
2139169689Skan    case TRY_FINALLY_EXPR:
2140169689Skan    case EH_FILTER_EXPR:
2141169689Skan    case CATCH_EXPR:
2142169689Skan    case RESX_EXPR:
2143169689Skan      /* These nodes contain no variable references.  */
2144169689Skan      break;
2145169689Skan
2146169689Skan    default:
2147169689Skan      /* Notice that if get_expr_operands tries to use &STMT as the
2148169689Skan	 operand pointer (which may only happen for USE operands), we
2149169689Skan	 will fail in add_stmt_operand.  This default will handle
2150169689Skan	 statements like empty statements, or CALL_EXPRs that may
2151169689Skan	 appear on the RHS of a statement or as statements themselves.  */
2152169689Skan      get_expr_operands (stmt, &stmt, opf_none);
2153169689Skan      break;
2154169689Skan    }
2155169689Skan}
2156169689Skan
2157169689Skan
2158169689Skan/* Create an operands cache for STMT.  Resets the statement's volatile
   and addresses-taken flags, parses the statement into the build_*
   vectors, sorts the virtual operand vectors into a canonical order,
   and commits the result to STMT's operand cache.  */
2159169689Skan
2160169689Skanstatic void
2161169689Skanbuild_ssa_operands (tree stmt)
2162169689Skan{
2163169689Skan  stmt_ann_t ann = get_stmt_ann (stmt);
2164169689Skan
2165171825Skan  /* Initially assume that the statement has no volatile operands and
2166171825Skan     does not take the address of any symbols.  */
2167169689Skan  if (ann)
2168171825Skan    {
2169171825Skan      ann->has_volatile_ops = false;
2170171825Skan      if (ann->addresses_taken)
2171171825Skan	ann->addresses_taken = NULL;
2172171825Skan    }
2173169689Skan
2174169689Skan  start_ssa_stmt_operands ();
2175169689Skan
2176169689Skan  parse_ssa_operands (stmt);
  /* Sort each virtual operand vector; presumably this establishes a
     canonical order for later comparison/merging — confirm.  */
2177169689Skan  operand_build_sort_virtual (build_vuses);
2178169689Skan  operand_build_sort_virtual (build_v_may_defs);
2179169689Skan  operand_build_sort_virtual (build_v_must_defs);
2180169689Skan
2181169689Skan  finalize_ssa_stmt_operands (stmt);
2182169689Skan}
2183169689Skan
2184169689Skan
2185169689Skan/* Free any operands vectors in OPS.  */
2186169689Skan
2187169689Skanvoid
2188169689Skanfree_ssa_operands (stmt_operands_p ops)
2189169689Skan{
2190169689Skan  ops->def_ops = NULL;
2191169689Skan  ops->use_ops = NULL;
2192169689Skan  ops->maydef_ops = NULL;
2193169689Skan  ops->mustdef_ops = NULL;
2194169689Skan  ops->vuse_ops = NULL;
2195169689Skan}
2196169689Skan
2197169689Skan
2198169689Skan/* Recompute the operand cache of statement STMT.  Only valid once SSA
   operands are active; STMT's 'modified' flag must be set on entry and
   is cleared on exit.  */
2199169689Skan
2200169689Skanvoid
2201169689Skanupdate_stmt_operands (tree stmt)
2202169689Skan{
2203169689Skan  stmt_ann_t ann = get_stmt_ann (stmt);
2204169689Skan
2205169689Skan  /* If update_stmt_operands is called before SSA is initialized, do
2206169689Skan     nothing.  */
2207169689Skan  if (!ssa_operands_active ())
2208169689Skan    return;
2209169689Skan
2210169689Skan  /* The optimizers cannot handle statements that are nothing but a
2211169689Skan     _DECL.  This indicates a bug in the gimplifier.  */
2212169689Skan  gcc_assert (!SSA_VAR_P (stmt));
2213169689Skan
  /* Callers must only request an update for statements actually marked
     modified.  */
2214169689Skan  gcc_assert (ann->modified);
2215169689Skan
2216169689Skan  timevar_push (TV_TREE_OPS);
2217169689Skan
2218169689Skan  build_ssa_operands (stmt);
2219169689Skan
2220169689Skan  /* Clear the modified bit for STMT.  */
2221169689Skan  ann->modified = 0;
2222169689Skan
2223169689Skan  timevar_pop (TV_TREE_OPS);
2224169689Skan}
2225169689Skan
2226169689Skan
2227169689Skan/* Copies virtual operands from SRC to DEST.  DEST's operands are first
   rebuilt from its own statement; then its virtual operand slots are
   overwritten with the exact trees used by SRC.  */
2228169689Skan
2229169689Skanvoid
2230169689Skancopy_virtual_operands (tree dest, tree src)
2231169689Skan{
2232169689Skan  tree t;
2233169689Skan  ssa_op_iter iter, old_iter;
2234169689Skan  use_operand_p use_p, u2;
2235169689Skan  def_operand_p def_p, d2;
2236169689Skan
2237169689Skan  build_ssa_operands (dest);
2238169689Skan
2239169689Skan  /* Copy all the virtual fields.  */
2240169689Skan  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2241169689Skan    append_vuse (t);
2242169689Skan  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2243169689Skan    append_v_may_def (t);
2244169689Skan  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2245169689Skan    append_v_must_def (t);
2246169689Skan
  /* Nothing virtual to copy; DEST keeps its freshly built operands.  */
2247169689Skan  if (VEC_length (tree, build_vuses) == 0
2248169689Skan      && VEC_length (tree, build_v_may_defs) == 0
2249169689Skan      && VEC_length (tree, build_v_must_defs) == 0)
2250169689Skan    return;
2251169689Skan
2252169689Skan  /* Now commit the virtual operands to this stmt.  */
2253169689Skan  finalize_ssa_v_must_defs (dest);
2254169689Skan  finalize_ssa_v_may_defs (dest);
2255169689Skan  finalize_ssa_vuses (dest);
2256169689Skan
2257169689Skan  /* Finally, set the fields to the same values as the originals.
     Each of DEST's new virtual operands is walked in lockstep with
     SRC's and given the identical use/def trees.  */
2258169689Skan  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2259169689Skan  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2260169689Skan    {
2261169689Skan      gcc_assert (!op_iter_done (&old_iter));
2262169689Skan      SET_USE (use_p, t);
2263169689Skan      t = op_iter_next_tree (&old_iter);
2264169689Skan    }
2265169689Skan  gcc_assert (op_iter_done (&old_iter));
2266169689Skan
2267169689Skan  op_iter_init_maydef (&old_iter, src, &u2, &d2);
2268169689Skan  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2269169689Skan    {
2270169689Skan      gcc_assert (!op_iter_done (&old_iter));
2271169689Skan      SET_USE (use_p, USE_FROM_PTR (u2));
2272169689Skan      SET_DEF (def_p, DEF_FROM_PTR (d2));
2273169689Skan      op_iter_next_maymustdef (&u2, &d2, &old_iter);
2274169689Skan    }
2275169689Skan  gcc_assert (op_iter_done (&old_iter));
2276169689Skan
2277169689Skan  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2278169689Skan  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2279169689Skan    {
2280169689Skan      gcc_assert (!op_iter_done (&old_iter));
2281169689Skan      SET_USE (use_p, USE_FROM_PTR (u2));
2282169689Skan      SET_DEF (def_p, DEF_FROM_PTR (d2));
2283169689Skan      op_iter_next_maymustdef (&u2, &d2, &old_iter);
2284169689Skan    }
2285169689Skan  gcc_assert (op_iter_done (&old_iter));
2286169689Skan
2287169689Skan}
2288169689Skan
2289169689Skan
2290169689Skan/* Specifically for use in DOM's expression analysis.  Given a store, we
2291169689Skan   create an artificial stmt which looks like a load from the store, this can
2292169689Skan   be used to eliminate redundant loads.  OLD_OPS are the operands from the
2293169689Skan   store stmt, and NEW_STMT is the new load which represents a load of the
2294169689Skan   values stored.  */
2295169689Skan
2296169689Skanvoid
2297169689Skancreate_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2298169689Skan{
2299169689Skan  stmt_ann_t ann;
2300169689Skan  tree op;
2301169689Skan  ssa_op_iter iter;
2302169689Skan  use_operand_p use_p;
2303169689Skan  unsigned x;
2304169689Skan
2305169689Skan  ann = get_stmt_ann (new_stmt);
2306169689Skan
2307169689Skan  /* Process the stmt looking for operands.  */
2308169689Skan  start_ssa_stmt_operands ();
2309169689Skan  parse_ssa_operands (new_stmt);
2310169689Skan
2311169689Skan  for (x = 0; x < VEC_length (tree, build_vuses); x++)
2312169689Skan    {
2313169689Skan      tree t = VEC_index (tree, build_vuses, x);
2314169689Skan      if (TREE_CODE (t) != SSA_NAME)
2315169689Skan	{
2316169689Skan	  var_ann_t ann = var_ann (t);
2317169689Skan	  ann->in_vuse_list = 0;
2318169689Skan	}
2319169689Skan    }
2320169689Skan
2321169689Skan  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2322169689Skan    {
2323169689Skan      tree t = VEC_index (tree, build_v_may_defs, x);
2324169689Skan      if (TREE_CODE (t) != SSA_NAME)
2325169689Skan	{
2326169689Skan	  var_ann_t ann = var_ann (t);
2327169689Skan	  ann->in_v_may_def_list = 0;
2328169689Skan	}
2329169689Skan    }
2330169689Skan
2331169689Skan  /* Remove any virtual operands that were found.  */
2332169689Skan  VEC_truncate (tree, build_v_may_defs, 0);
2333169689Skan  VEC_truncate (tree, build_v_must_defs, 0);
2334169689Skan  VEC_truncate (tree, build_vuses, 0);
2335169689Skan
2336169689Skan  /* For each VDEF on the original statement, we want to create a
2337169689Skan     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2338169689Skan     statement.  */
2339169689Skan  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2340169689Skan			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2341169689Skan    append_vuse (op);
2342169689Skan
2343169689Skan  /* Now build the operands for this new stmt.  */
2344169689Skan  finalize_ssa_stmt_operands (new_stmt);
2345169689Skan
2346169689Skan  /* All uses in this fake stmt must not be in the immediate use lists.  */
2347169689Skan  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2348169689Skan    delink_imm_use (use_p);
2349169689Skan}
2350169689Skan
2351169689Skan
2352169689Skan/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
2353169689Skan   to test the validity of the swap operation.  */
2354169689Skan
2355169689Skanvoid
2356169689Skanswap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2357169689Skan{
2358169689Skan  tree op0, op1;
2359169689Skan  op0 = *exp0;
2360169689Skan  op1 = *exp1;
2361169689Skan
2362169689Skan  /* If the operand cache is active, attempt to preserve the relative
2363169689Skan     positions of these two operands in their respective immediate use
2364169689Skan     lists.  */
2365169689Skan  if (ssa_operands_active () && op0 != op1)
2366169689Skan    {
2367169689Skan      use_optype_p use0, use1, ptr;
2368169689Skan      use0 = use1 = NULL;
2369169689Skan
2370169689Skan      /* Find the 2 operands in the cache, if they are there.  */
2371169689Skan      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2372169689Skan	if (USE_OP_PTR (ptr)->use == exp0)
2373169689Skan	  {
2374169689Skan	    use0 = ptr;
2375169689Skan	    break;
2376169689Skan	  }
2377169689Skan
2378169689Skan      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2379169689Skan	if (USE_OP_PTR (ptr)->use == exp1)
2380169689Skan	  {
2381169689Skan	    use1 = ptr;
2382169689Skan	    break;
2383169689Skan	  }
2384169689Skan
2385169689Skan      /* If both uses don't have operand entries, there isn't much we can do
2386169689Skan         at this point.  Presumably we don't need to worry about it.  */
2387169689Skan      if (use0 && use1)
2388169689Skan        {
2389169689Skan	  tree *tmp = USE_OP_PTR (use1)->use;
2390169689Skan	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2391169689Skan	  USE_OP_PTR (use0)->use = tmp;
2392169689Skan	}
2393169689Skan    }
2394169689Skan
2395169689Skan  /* Now swap the data.  */
2396169689Skan  *exp0 = op1;
2397169689Skan  *exp1 = op0;
2398169689Skan}
2399169689Skan
2400169689Skan
2401169689Skan/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
2402169689Skan   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
2403169689Skan   a single variable whose address has been taken or any other valid
2404169689Skan   GIMPLE memory reference (structure reference, array, etc).  If the
2405169689Skan   base address of REF is a decl that has sub-variables, also add all
2406169689Skan   of its sub-variables.  */
2407169689Skan
2408169689Skanvoid
2409169689Skanadd_to_addressable_set (tree ref, bitmap *addresses_taken)
2410169689Skan{
2411169689Skan  tree var;
2412169689Skan  subvar_t svars;
2413169689Skan
2414169689Skan  gcc_assert (addresses_taken);
2415169689Skan
2416169689Skan  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2417169689Skan     as the only thing we take the address of.  If VAR is a structure,
2418169689Skan     taking the address of a field means that the whole structure may
2419169689Skan     be referenced using pointer arithmetic.  See PR 21407 and the
2420169689Skan     ensuing mailing list discussion.  */
2421169689Skan  var = get_base_address (ref);
2422169689Skan  if (var && SSA_VAR_P (var))
2423169689Skan    {
2424169689Skan      if (*addresses_taken == NULL)
2425169689Skan	*addresses_taken = BITMAP_GGC_ALLOC ();
2426169689Skan
2427169689Skan      if (var_can_have_subvars (var)
2428169689Skan	  && (svars = get_subvars_for_var (var)))
2429169689Skan	{
2430169689Skan	  subvar_t sv;
2431169689Skan	  for (sv = svars; sv; sv = sv->next)
2432169689Skan	    {
2433169689Skan	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2434169689Skan	      TREE_ADDRESSABLE (sv->var) = 1;
2435169689Skan	    }
2436169689Skan	}
2437169689Skan      else
2438169689Skan	{
2439169689Skan	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
2440169689Skan	  TREE_ADDRESSABLE (var) = 1;
2441169689Skan	}
2442169689Skan    }
2443169689Skan}
2444169689Skan
2445169689Skan
/* Scan the immediate_use list for VAR making sure it's linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* The list root is embedded in the SSA_NAME itself and carries no use.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An unlinked root must have both link pointers NULL.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward: each node's prev must point back at its predecessor,
     and each node's use slot must refer to VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  The backward walk decrements
     COUNT, so it must visit exactly as many nodes as the forward walk.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  /* Both walks must have seen the same number of nodes.  */
  if (count != 0)
    goto error;

  return false;

 error:
  /* Dump whatever context is available about the node PTR where the
     inconsistency was detected.  */
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf(f, "\n");
  return true;
}
2516169689Skan
2517169689Skan
2518169689Skan/* Dump all the immediate uses to FILE.  */
2519169689Skan
2520169689Skanvoid
2521169689Skandump_immediate_uses_for (FILE *file, tree var)
2522169689Skan{
2523169689Skan  imm_use_iterator iter;
2524169689Skan  use_operand_p use_p;
2525169689Skan
2526169689Skan  gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2527169689Skan
2528169689Skan  print_generic_expr (file, var, TDF_SLIM);
2529169689Skan  fprintf (file, " : -->");
2530169689Skan  if (has_zero_uses (var))
2531169689Skan    fprintf (file, " no uses.\n");
2532169689Skan  else
2533169689Skan    if (has_single_use (var))
2534169689Skan      fprintf (file, " single use.\n");
2535169689Skan    else
2536169689Skan      fprintf (file, "%d uses.\n", num_imm_uses (var));
2537169689Skan
2538169689Skan  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2539169689Skan    {
2540169689Skan      if (use_p->stmt == NULL && use_p->use == NULL)
2541169689Skan        fprintf (file, "***end of stmt iterator marker***\n");
2542169689Skan      else
2543169689Skan	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2544169689Skan	  print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2545169689Skan	else
2546169689Skan	  print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2547169689Skan    }
2548169689Skan  fprintf(file, "\n");
2549169689Skan}
2550169689Skan
2551169689Skan
2552169689Skan/* Dump all the immediate uses to FILE.  */
2553169689Skan
2554169689Skanvoid
2555169689Skandump_immediate_uses (FILE *file)
2556169689Skan{
2557169689Skan  tree var;
2558169689Skan  unsigned int x;
2559169689Skan
2560169689Skan  fprintf (file, "Immediate_uses: \n\n");
2561169689Skan  for (x = 1; x < num_ssa_names; x++)
2562169689Skan    {
2563169689Skan      var = ssa_name(x);
2564169689Skan      if (!var)
2565169689Skan        continue;
2566169689Skan      dump_immediate_uses_for (file, var);
2567169689Skan    }
2568169689Skan}
2569169689Skan
2570169689Skan
2571169689Skan/* Dump def-use edges on stderr.  */
2572169689Skan
2573169689Skanvoid
2574169689Skandebug_immediate_uses (void)
2575169689Skan{
2576169689Skan  dump_immediate_uses (stderr);
2577169689Skan}
2578169689Skan
2579169689Skan
2580169689Skan/* Dump def-use edges on stderr.  */
2581169689Skan
2582169689Skanvoid
2583169689Skandebug_immediate_uses_for (tree var)
2584169689Skan{
2585169689Skan  dump_immediate_uses_for (stderr, var);
2586169689Skan}
2587169689Skan
2588169689Skan#include "gt-tree-ssa-operands.h"
2589