1/* Gimple IR support functions.
2
3   Copyright (C) 2007-2015 Free Software Foundation, Inc.
4   Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "target.h"
27#include "hash-set.h"
28#include "machmode.h"
29#include "vec.h"
30#include "double-int.h"
31#include "input.h"
32#include "alias.h"
33#include "symtab.h"
34#include "wide-int.h"
35#include "inchash.h"
36#include "tree.h"
37#include "fold-const.h"
38#include "calls.h"
39#include "stmt.h"
40#include "stor-layout.h"
41#include "hard-reg-set.h"
42#include "predict.h"
43#include "input.h"
44#include "function.h"
45#include "dominance.h"
46#include "cfg.h"
47#include "basic-block.h"
48#include "tree-ssa-alias.h"
49#include "internal-fn.h"
50#include "tree-eh.h"
51#include "gimple-expr.h"
52#include "is-a.h"
53#include "gimple.h"
54#include "gimple-iterator.h"
55#include "gimple-walk.h"
56#include "gimple.h"
57#include "gimplify.h"
58#include "diagnostic.h"
59#include "value-prof.h"
60#include "flags.h"
61#include "alias.h"
62#include "demangle.h"
63#include "langhooks.h"
64#include "bitmap.h"
65#include "stringpool.h"
66#include "tree-ssanames.h"
67#include "ipa-ref.h"
68#include "lto-streamer.h"
69#include "cgraph.h"
70#include "gimple-ssa.h"
71
72
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
83
/* Size in bytes of each GSS_* structure, indexed by
   gimple_statement_structure_enum; generated from gsstruct.def.  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
89
/* Printable name of each GIMPLE code, indexed by gimple_code;
   generated from gimple.def.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE
95
/* Map from a gimple_code to the gimple_statement_structure_enum that
   implements it; generated from gimple.def.  */
#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
101
/* Gimple stats.  */

/* Number of tuples allocated and total bytes allocated, per allocation
   kind; only updated when GATHER_STATISTICS is enabled (see
   gimple_alloc_stat).  */
int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};
114
115/* Gimple tuple constructors.
116   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
117   be passed a NULL to start with an empty sequence.  */
118
/* Set the code for statement G to CODE.  Only stamps the code field;
   it does not resize or otherwise adjust the tuple.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->code = code;
}
126
127/* Return the number of bytes needed to hold a GIMPLE statement with
128   code CODE.  */
129
130static inline size_t
131gimple_size (enum gimple_code code)
132{
133  return gsstruct_code_size[gss_for_code (code)];
134}
135
136/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
137   operands.  */
138
139gimple
140gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
141{
142  size_t size;
143  gimple stmt;
144
145  size = gimple_size (code);
146  if (num_ops > 0)
147    size += sizeof (tree) * (num_ops - 1);
148
149  if (GATHER_STATISTICS)
150    {
151      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
152      gimple_alloc_counts[(int) kind]++;
153      gimple_alloc_sizes[(int) kind] += size;
154    }
155
156  stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
157  gimple_set_code (stmt, code);
158  gimple_set_num_ops (stmt, num_ops);
159
160  /* Do not call gimple_set_modified here as it has other side
161     effects and this tuple is still not completely built.  */
162  stmt->modified = 1;
163  gimple_init_singleton (stmt);
164
165  return stmt;
166}
167
168/* Set SUBCODE to be the code of the expression computed by statement G.  */
169
170static inline void
171gimple_set_subcode (gimple g, unsigned subcode)
172{
173  /* We only have 16 bits for the RHS code.  Assert that we are not
174     overflowing it.  */
175  gcc_assert (subcode < (1 << 16));
176  g->subcode = subcode;
177}
178
179
180
181/* Build a tuple with operands.  CODE is the statement to build (which
182   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
183   for the new tuple.  NUM_OPS is the number of operands to allocate.  */
184
185#define gimple_build_with_ops(c, s, n) \
186  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
187
188static gimple
189gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
190		            unsigned num_ops MEM_STAT_DECL)
191{
192  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
193  gimple_set_subcode (s, subcode);
194
195  return s;
196}
197
198
199/* Build a GIMPLE_RETURN statement returning RETVAL.  */
200
201greturn *
202gimple_build_return (tree retval)
203{
204  greturn *s
205    = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
206					       2));
207  if (retval)
208    gimple_return_set_retval (s, retval);
209  return s;
210}
211
212/* Reset alias information on call S.  */
213
214void
215gimple_call_reset_alias_info (gcall *s)
216{
217  if (gimple_call_flags (s) & ECF_CONST)
218    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
219  else
220    pt_solution_reset (gimple_call_use_set (s));
221  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
222    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
223  else
224    pt_solution_reset (gimple_call_clobber_set (s));
225}
226
227/* Helper for gimple_build_call, gimple_build_call_valist,
228   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
229   components of a GIMPLE_CALL statement to function FN with NARGS
230   arguments.  */
231
232static inline gcall *
233gimple_build_call_1 (tree fn, unsigned nargs)
234{
235  gcall *s
236    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
237					     nargs + 3));
238  if (TREE_CODE (fn) == FUNCTION_DECL)
239    fn = build_fold_addr_expr (fn);
240  gimple_set_op (s, 1, fn);
241  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
242  gimple_call_reset_alias_info (s);
243  return s;
244}
245
246
247/* Build a GIMPLE_CALL statement to function FN with the arguments
248   specified in vector ARGS.  */
249
250gcall *
251gimple_build_call_vec (tree fn, vec<tree> args)
252{
253  unsigned i;
254  unsigned nargs = args.length ();
255  gcall *call = gimple_build_call_1 (fn, nargs);
256
257  for (i = 0; i < nargs; i++)
258    gimple_call_set_arg (call, i, args[i]);
259
260  return call;
261}
262
263
264/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
265   arguments.  The ... are the arguments.  */
266
267gcall *
268gimple_build_call (tree fn, unsigned nargs, ...)
269{
270  va_list ap;
271  gcall *call;
272  unsigned i;
273
274  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
275
276  call = gimple_build_call_1 (fn, nargs);
277
278  va_start (ap, nargs);
279  for (i = 0; i < nargs; i++)
280    gimple_call_set_arg (call, i, va_arg (ap, tree));
281  va_end (ap);
282
283  return call;
284}
285
286
287/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
288   arguments.  AP contains the arguments.  */
289
290gcall *
291gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
292{
293  gcall *call;
294  unsigned i;
295
296  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
297
298  call = gimple_build_call_1 (fn, nargs);
299
300  for (i = 0; i < nargs; i++)
301    gimple_call_set_arg (call, i, va_arg (ap, tree));
302
303  return call;
304}
305
306
307/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
308   Build the basic components of a GIMPLE_CALL statement to internal
309   function FN with NARGS arguments.  */
310
311static inline gcall *
312gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
313{
314  gcall *s
315    = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
316					     nargs + 3));
317  s->subcode |= GF_CALL_INTERNAL;
318  gimple_call_set_internal_fn (s, fn);
319  gimple_call_reset_alias_info (s);
320  return s;
321}
322
323
324/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
325   the number of arguments.  The ... are the arguments.  */
326
327gcall *
328gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
329{
330  va_list ap;
331  gcall *call;
332  unsigned i;
333
334  call = gimple_build_call_internal_1 (fn, nargs);
335  va_start (ap, nargs);
336  for (i = 0; i < nargs; i++)
337    gimple_call_set_arg (call, i, va_arg (ap, tree));
338  va_end (ap);
339
340  return call;
341}
342
343
344/* Build a GIMPLE_CALL statement to internal function FN with the arguments
345   specified in vector ARGS.  */
346
347gcall *
348gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
349{
350  unsigned i, nargs;
351  gcall *call;
352
353  nargs = args.length ();
354  call = gimple_build_call_internal_1 (fn, nargs);
355  for (i = 0; i < nargs; i++)
356    gimple_call_set_arg (call, i, args[i]);
357
358  return call;
359}
360
361
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gcall *
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gcall *call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  /* Prefer the FUNCTION_DECL when it is known; otherwise fall back to
     the call expression's function operand.  */
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* Only one of CALL_ALLOCA_FOR_VAR_P / CALL_FROM_THUNK_P is copied:
     the former for alloca builtins, the latter for everything else —
     presumably because the two flags share a tree bit; verify against
     tree.h.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));
  gimple_call_set_with_bounds (call, CALL_WITH_BOUNDS_P (t));

  return call;
}
401
402
403/* Build a GIMPLE_ASSIGN statement.
404
405   LHS of the assignment.
406   RHS of the assignment which can be unary or binary.  */
407
408gassign *
409gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL)
410{
411  enum tree_code subcode;
412  tree op1, op2, op3;
413
414  extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3);
415  return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
416}
417
418
419/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
420   OP1, OP2 and OP3.  */
421
422static inline gassign *
423gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1,
424		       tree op2, tree op3 MEM_STAT_DECL)
425{
426  unsigned num_ops;
427  gassign *p;
428
429  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
430     code).  */
431  num_ops = get_gimple_rhs_num_ops (subcode) + 1;
432
433  p = as_a <gassign *> (
434        gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
435				    PASS_MEM_STAT));
436  gimple_assign_set_lhs (p, lhs);
437  gimple_assign_set_rhs1 (p, op1);
438  if (op2)
439    {
440      gcc_assert (num_ops > 2);
441      gimple_assign_set_rhs2 (p, op2);
442    }
443
444  if (op3)
445    {
446      gcc_assert (num_ops > 3);
447      gimple_assign_set_rhs3 (p, op3);
448    }
449
450  return p;
451}
452
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  Thin forwarder to gimple_build_assign_1.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2, tree op3 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT);
}
462
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1 and OP2.  Convenience overload; the third RHS operand is
   NULL_TREE.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1,
		     tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE
				PASS_MEM_STAT);
}
473
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1.
   Convenience overload; the second and third RHS operands are
   NULL_TREE.  */

gassign *
gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL)
{
  return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE
				PASS_MEM_STAT);
}
482
483
484/* Build a GIMPLE_COND statement.
485
486   PRED is the condition used to compare LHS and the RHS.
487   T_LABEL is the label to jump to if the condition is true.
488   F_LABEL is the label to jump to otherwise.  */
489
490gcond *
491gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
492		   tree t_label, tree f_label)
493{
494  gcond *p;
495
496  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
497  p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
498  gimple_cond_set_lhs (p, lhs);
499  gimple_cond_set_rhs (p, rhs);
500  gimple_cond_set_true_label (p, t_label);
501  gimple_cond_set_false_label (p, f_label);
502  return p;
503}
504
505/* Build a GIMPLE_COND statement from the conditional expression tree
506   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */
507
508gcond *
509gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
510{
511  enum tree_code code;
512  tree lhs, rhs;
513
514  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
515  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
516}
517
518/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
519   boolean expression tree COND.  */
520
521void
522gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
523{
524  enum tree_code code;
525  tree lhs, rhs;
526
527  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
528  gimple_cond_set_condition (stmt, code, lhs, rhs);
529}
530
531/* Build a GIMPLE_LABEL statement for LABEL.  */
532
533glabel *
534gimple_build_label (tree label)
535{
536  glabel *p
537    = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
538  gimple_label_set_label (p, label);
539  return p;
540}
541
542/* Build a GIMPLE_GOTO statement to label DEST.  */
543
544ggoto *
545gimple_build_goto (tree dest)
546{
547  ggoto *p
548    = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
549  gimple_goto_set_dest (p, dest);
550  return p;
551}
552
553
/* Build a GIMPLE_NOP statement (a no-operation tuple with no
   operands).  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
561
562
563/* Build a GIMPLE_BIND statement.
564   VARS are the variables in BODY.
565   BLOCK is the containing block.  */
566
567gbind *
568gimple_build_bind (tree vars, gimple_seq body, tree block)
569{
570  gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
571  gimple_bind_set_vars (p, vars);
572  if (body)
573    gimple_bind_set_body (p, body);
574  if (block)
575    gimple_bind_set_block (p, block);
576  return p;
577}
578
579/* Helper function to set the simple fields of a asm stmt.
580
581   STRING is a pointer to a string that is the asm blocks assembly code.
582   NINPUT is the number of register inputs.
583   NOUTPUT is the number of register outputs.
584   NCLOBBERS is the number of clobbered registers.
585   */
586
587static inline gasm *
588gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
589                    unsigned nclobbers, unsigned nlabels)
590{
591  gasm *p;
592  int size = strlen (string);
593
594  /* ASMs with labels cannot have outputs.  This should have been
595     enforced by the front end.  */
596  gcc_assert (nlabels == 0 || noutputs == 0);
597
598  p = as_a <gasm *> (
599        gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
600			       ninputs + noutputs + nclobbers + nlabels));
601
602  p->ni = ninputs;
603  p->no = noutputs;
604  p->nc = nclobbers;
605  p->nl = nlabels;
606  p->string = ggc_alloc_string (string, size);
607
608  if (GATHER_STATISTICS)
609    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
610
611  return p;
612}
613
614/* Build a GIMPLE_ASM statement.
615
616   STRING is the assembly code.
617   NINPUT is the number of register inputs.
618   NOUTPUT is the number of register outputs.
619   NCLOBBERS is the number of clobbered registers.
620   INPUTS is a vector of the input register parameters.
621   OUTPUTS is a vector of the output register parameters.
622   CLOBBERS is a vector of the clobbered register parameters.
623   LABELS is a vector of destination labels.  */
624
625gasm *
626gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
627                      vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
628		      vec<tree, va_gc> *labels)
629{
630  gasm *p;
631  unsigned i;
632
633  p = gimple_build_asm_1 (string,
634                          vec_safe_length (inputs),
635                          vec_safe_length (outputs),
636                          vec_safe_length (clobbers),
637			  vec_safe_length (labels));
638
639  for (i = 0; i < vec_safe_length (inputs); i++)
640    gimple_asm_set_input_op (p, i, (*inputs)[i]);
641
642  for (i = 0; i < vec_safe_length (outputs); i++)
643    gimple_asm_set_output_op (p, i, (*outputs)[i]);
644
645  for (i = 0; i < vec_safe_length (clobbers); i++)
646    gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
647
648  for (i = 0; i < vec_safe_length (labels); i++)
649    gimple_asm_set_label_op (p, i, (*labels)[i]);
650
651  return p;
652}
653
654/* Build a GIMPLE_CATCH statement.
655
656  TYPES are the catch types.
657  HANDLER is the exception handler.  */
658
659gcatch *
660gimple_build_catch (tree types, gimple_seq handler)
661{
662  gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
663  gimple_catch_set_types (p, types);
664  if (handler)
665    gimple_catch_set_handler (p, handler);
666
667  return p;
668}
669
670/* Build a GIMPLE_EH_FILTER statement.
671
672   TYPES are the filter's types.
673   FAILURE is the filter's failure action.  */
674
675geh_filter *
676gimple_build_eh_filter (tree types, gimple_seq failure)
677{
678  geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
679  gimple_eh_filter_set_types (p, types);
680  if (failure)
681    gimple_eh_filter_set_failure (p, failure);
682
683  return p;
684}
685
686/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */
687
688geh_mnt *
689gimple_build_eh_must_not_throw (tree decl)
690{
691  geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
692
693  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
694  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
695  gimple_eh_must_not_throw_set_fndecl (p, decl);
696
697  return p;
698}
699
700/* Build a GIMPLE_EH_ELSE statement.  */
701
702geh_else *
703gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
704{
705  geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
706  gimple_eh_else_set_n_body (p, n_body);
707  gimple_eh_else_set_e_body (p, e_body);
708  return p;
709}
710
711/* Build a GIMPLE_TRY statement.
712
713   EVAL is the expression to evaluate.
714   CLEANUP is the cleanup expression.
715   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
716   whether this is a try/catch or a try/finally respectively.  */
717
718gtry *
719gimple_build_try (gimple_seq eval, gimple_seq cleanup,
720    		  enum gimple_try_flags kind)
721{
722  gtry *p;
723
724  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
725  p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
726  gimple_set_subcode (p, kind);
727  if (eval)
728    gimple_try_set_eval (p, eval);
729  if (cleanup)
730    gimple_try_set_cleanup (p, cleanup);
731
732  return p;
733}
734
735/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
736
737   CLEANUP is the cleanup expression.  */
738
739gimple
740gimple_build_wce (gimple_seq cleanup)
741{
742  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
743  if (cleanup)
744    gimple_wce_set_cleanup (p, cleanup);
745
746  return p;
747}
748
749
750/* Build a GIMPLE_RESX statement.  */
751
752gresx *
753gimple_build_resx (int region)
754{
755  gresx *p
756    = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
757  p->region = region;
758  return p;
759}
760
761
762/* The helper for constructing a gimple switch statement.
763   INDEX is the switch's index.
764   NLABELS is the number of labels in the switch excluding the default.
765   DEFAULT_LABEL is the default label for the switch statement.  */
766
767gswitch *
768gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
769{
770  /* nlabels + 1 default label + 1 index.  */
771  gcc_checking_assert (default_label);
772  gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
773							ERROR_MARK,
774							1 + 1 + nlabels));
775  gimple_switch_set_index (p, index);
776  gimple_switch_set_default_label (p, default_label);
777  return p;
778}
779
780/* Build a GIMPLE_SWITCH statement.
781
782   INDEX is the switch's index.
783   DEFAULT_LABEL is the default label
784   ARGS is a vector of labels excluding the default.  */
785
786gswitch *
787gimple_build_switch (tree index, tree default_label, vec<tree> args)
788{
789  unsigned i, nlabels = args.length ();
790
791  gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
792
793  /* Copy the labels from the vector to the switch statement.  */
794  for (i = 0; i < nlabels; i++)
795    gimple_switch_set_label (p, i + 1, args[i]);
796
797  return p;
798}
799
800/* Build a GIMPLE_EH_DISPATCH statement.  */
801
802geh_dispatch *
803gimple_build_eh_dispatch (int region)
804{
805  geh_dispatch *p
806    = as_a <geh_dispatch *> (
807	gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
808  p->region = region;
809  return p;
810}
811
812/* Build a new GIMPLE_DEBUG_BIND statement.
813
814   VAR is bound to VALUE; block and location are taken from STMT.  */
815
816gdebug *
817gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
818{
819  gdebug *p
820    = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
821						   (unsigned)GIMPLE_DEBUG_BIND, 2
822						   PASS_MEM_STAT));
823  gimple_debug_bind_set_var (p, var);
824  gimple_debug_bind_set_value (p, value);
825  if (stmt)
826    gimple_set_location (p, gimple_location (stmt));
827
828  return p;
829}
830
831
832/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
833
834   VAR is bound to VALUE; block and location are taken from STMT.  */
835
836gdebug *
837gimple_build_debug_source_bind_stat (tree var, tree value,
838				     gimple stmt MEM_STAT_DECL)
839{
840  gdebug *p
841    = as_a <gdebug *> (
842        gimple_build_with_ops_stat (GIMPLE_DEBUG,
843				    (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
844				    PASS_MEM_STAT));
845
846  gimple_debug_source_bind_set_var (p, var);
847  gimple_debug_source_bind_set_value (p, value);
848  if (stmt)
849    gimple_set_location (p, gimple_location (stmt));
850
851  return p;
852}
853
854
855/* Build a GIMPLE_OMP_CRITICAL statement.
856
857   BODY is the sequence of statements for which only one thread can execute.
858   NAME is optional identifier for this critical block.  */
859
860gomp_critical *
861gimple_build_omp_critical (gimple_seq body, tree name)
862{
863  gomp_critical *p
864    = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
865  gimple_omp_critical_set_name (p, name);
866  if (body)
867    gimple_omp_set_body (p, body);
868
869  return p;
870}
871
872/* Build a GIMPLE_OMP_FOR statement.
873
874   BODY is sequence of statements inside the for loop.
875   KIND is the `for' variant.
876   CLAUSES, are any of the construct's clauses.
877   COLLAPSE is the collapse count.
878   PRE_BODY is the sequence of statements that are loop invariant.  */
879
880gomp_for *
881gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
882		      gimple_seq pre_body)
883{
884  gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
885  if (body)
886    gimple_omp_set_body (p, body);
887  gimple_omp_for_set_clauses (p, clauses);
888  gimple_omp_for_set_kind (p, kind);
889  p->collapse = collapse;
890  p->iter =  ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);
891
892  if (pre_body)
893    gimple_omp_for_set_pre_body (p, pre_body);
894
895  return p;
896}
897
898
899/* Build a GIMPLE_OMP_PARALLEL statement.
900
901   BODY is sequence of statements which are executed in parallel.
902   CLAUSES, are the OMP parallel construct's clauses.
903   CHILD_FN is the function created for the parallel threads to execute.
904   DATA_ARG are the shared data argument(s).  */
905
906gomp_parallel *
907gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
908			   tree data_arg)
909{
910  gomp_parallel *p
911    = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
912  if (body)
913    gimple_omp_set_body (p, body);
914  gimple_omp_parallel_set_clauses (p, clauses);
915  gimple_omp_parallel_set_child_fn (p, child_fn);
916  gimple_omp_parallel_set_data_arg (p, data_arg);
917
918  return p;
919}
920
921
922/* Build a GIMPLE_OMP_TASK statement.
923
924   BODY is sequence of statements which are executed by the explicit task.
925   CLAUSES, are the OMP parallel construct's clauses.
926   CHILD_FN is the function created for the parallel threads to execute.
927   DATA_ARG are the shared data argument(s).
928   COPY_FN is the optional function for firstprivate initialization.
929   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
930
931gomp_task *
932gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
933		       tree data_arg, tree copy_fn, tree arg_size,
934		       tree arg_align)
935{
936  gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
937  if (body)
938    gimple_omp_set_body (p, body);
939  gimple_omp_task_set_clauses (p, clauses);
940  gimple_omp_task_set_child_fn (p, child_fn);
941  gimple_omp_task_set_data_arg (p, data_arg);
942  gimple_omp_task_set_copy_fn (p, copy_fn);
943  gimple_omp_task_set_arg_size (p, arg_size);
944  gimple_omp_task_set_arg_align (p, arg_align);
945
946  return p;
947}
948
949
950/* Build a GIMPLE_OMP_SECTION statement for a sections statement.
951
952   BODY is the sequence of statements in the section.  */
953
954gimple
955gimple_build_omp_section (gimple_seq body)
956{
957  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
958  if (body)
959    gimple_omp_set_body (p, body);
960
961  return p;
962}
963
964
965/* Build a GIMPLE_OMP_MASTER statement.
966
967   BODY is the sequence of statements to be executed by just the master.  */
968
969gimple
970gimple_build_omp_master (gimple_seq body)
971{
972  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
973  if (body)
974    gimple_omp_set_body (p, body);
975
976  return p;
977}
978
979
980/* Build a GIMPLE_OMP_TASKGROUP statement.
981
982   BODY is the sequence of statements to be executed by the taskgroup
983   construct.  */
984
985gimple
986gimple_build_omp_taskgroup (gimple_seq body)
987{
988  gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
989  if (body)
990    gimple_omp_set_body (p, body);
991
992  return p;
993}
994
995
996/* Build a GIMPLE_OMP_CONTINUE statement.
997
998   CONTROL_DEF is the definition of the control variable.
999   CONTROL_USE is the use of the control variable.  */
1000
1001gomp_continue *
1002gimple_build_omp_continue (tree control_def, tree control_use)
1003{
1004  gomp_continue *p
1005    = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
1006  gimple_omp_continue_set_control_def (p, control_def);
1007  gimple_omp_continue_set_control_use (p, control_use);
1008  return p;
1009}
1010
1011/* Build a GIMPLE_OMP_ORDERED statement.
1012
1013   BODY is the sequence of statements inside a loop that will executed in
1014   sequence.  */
1015
1016gimple
1017gimple_build_omp_ordered (gimple_seq body)
1018{
1019  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1020  if (body)
1021    gimple_omp_set_body (p, body);
1022
1023  return p;
1024}
1025
1026
1027/* Build a GIMPLE_OMP_RETURN statement.
1028   WAIT_P is true if this is a non-waiting return.  */
1029
1030gimple
1031gimple_build_omp_return (bool wait_p)
1032{
1033  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1034  if (wait_p)
1035    gimple_omp_return_set_nowait (p);
1036
1037  return p;
1038}
1039
1040
1041/* Build a GIMPLE_OMP_SECTIONS statement.
1042
1043   BODY is a sequence of section statements.
1044   CLAUSES are any of the OMP sections contsruct's clauses: private,
1045   firstprivate, lastprivate, reduction, and nowait.  */
1046
1047gomp_sections *
1048gimple_build_omp_sections (gimple_seq body, tree clauses)
1049{
1050  gomp_sections *p
1051    = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1052  if (body)
1053    gimple_omp_set_body (p, body);
1054  gimple_omp_sections_set_clauses (p, clauses);
1055
1056  return p;
1057}
1058
1059
/* Build a GIMPLE_OMP_SECTIONS_SWITCH statement (no operands).  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}
1067
1068
1069/* Build a GIMPLE_OMP_SINGLE statement.
1070
1071   BODY is the sequence of statements that will be executed once.
1072   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1073   copyprivate, nowait.  */
1074
1075gomp_single *
1076gimple_build_omp_single (gimple_seq body, tree clauses)
1077{
1078  gomp_single *p
1079    = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1080  if (body)
1081    gimple_omp_set_body (p, body);
1082  gimple_omp_single_set_clauses (p, clauses);
1083
1084  return p;
1085}
1086
1087
1088/* Build a GIMPLE_OMP_TARGET statement.
1089
1090   BODY is the sequence of statements that will be executed.
1091   KIND is the kind of the region.
1092   CLAUSES are any of the construct's clauses.  */
1093
1094gomp_target *
1095gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1096{
1097  gomp_target *p
1098    = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1099  if (body)
1100    gimple_omp_set_body (p, body);
1101  gimple_omp_target_set_clauses (p, clauses);
1102  gimple_omp_target_set_kind (p, kind);
1103
1104  return p;
1105}
1106
1107
1108/* Build a GIMPLE_OMP_TEAMS statement.
1109
1110   BODY is the sequence of statements that will be executed.
1111   CLAUSES are any of the OMP teams construct's clauses.  */
1112
1113gomp_teams *
1114gimple_build_omp_teams (gimple_seq body, tree clauses)
1115{
1116  gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1117  if (body)
1118    gimple_omp_set_body (p, body);
1119  gimple_omp_teams_set_clauses (p, clauses);
1120
1121  return p;
1122}
1123
1124
1125/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
1126
1127gomp_atomic_load *
1128gimple_build_omp_atomic_load (tree lhs, tree rhs)
1129{
1130  gomp_atomic_load *p
1131    = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1132  gimple_omp_atomic_load_set_lhs (p, lhs);
1133  gimple_omp_atomic_load_set_rhs (p, rhs);
1134  return p;
1135}
1136
1137/* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1138
1139   VAL is the value we are storing.  */
1140
1141gomp_atomic_store *
1142gimple_build_omp_atomic_store (tree val)
1143{
1144  gomp_atomic_store *p
1145    = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1146  gimple_omp_atomic_store_set_val (p, val);
1147  return p;
1148}
1149
1150/* Build a GIMPLE_TRANSACTION statement.  */
1151
1152gtransaction *
1153gimple_build_transaction (gimple_seq body, tree label)
1154{
1155  gtransaction *p
1156    = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1157  gimple_transaction_set_body (p, body);
1158  gimple_transaction_set_label (p, label);
1159  return p;
1160}
1161
1162/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
1163   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */
1164
1165gimple
1166gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1167{
1168  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1169  /* Ensure all the predictors fit into the lower bits of the subcode.  */
1170  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1171  gimple_predict_set_predictor (p, predictor);
1172  gimple_predict_set_outcome (p, outcome);
1173  return p;
1174}
1175
1176#if defined ENABLE_GIMPLE_CHECKING
1177/* Complain of a gimple type mismatch and die.  */
1178
void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  /* CODE/SUBCODE describe what the caller expected; GS is the
     statement actually seen.  A subcode of 0 has no tree-code name,
     so print an empty string in that case.  */
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  gs->subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->subcode)
		    : "",
		  function, trim_filename (file), line);
}
1193#endif /* ENABLE_GIMPLE_CHECKING */
1194
1195
1196/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1197   *SEQ_P is NULL, a new sequence is allocated.  */
1198
1199void
1200gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1201{
1202  gimple_stmt_iterator si;
1203  if (gs == NULL)
1204    return;
1205
1206  si = gsi_last (*seq_p);
1207  gsi_insert_after (&si, gs, GSI_NEW_STMT);
1208}
1209
1210/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
1211   *SEQ_P is NULL, a new sequence is allocated.  This function is
1212   similar to gimple_seq_add_stmt, but does not scan the operands.
1213   During gimplification, we need to manipulate statement sequences
1214   before the def/use vectors have been constructed.  */
1215
1216void
1217gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
1218{
1219  gimple_stmt_iterator si;
1220
1221  if (gs == NULL)
1222    return;
1223
1224  si = gsi_last (*seq_p);
1225  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1226}
1227
1228/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1229   NULL, a new sequence is allocated.  */
1230
1231void
1232gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1233{
1234  gimple_stmt_iterator si;
1235  if (src == NULL)
1236    return;
1237
1238  si = gsi_last (*dst_p);
1239  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1240}
1241
1242/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
1243   NULL, a new sequence is allocated.  This function is
1244   similar to gimple_seq_add_seq, but does not scan the operands.  */
1245
1246void
1247gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1248{
1249  gimple_stmt_iterator si;
1250  if (src == NULL)
1251    return;
1252
1253  si = gsi_last (*dst_p);
1254  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1255}
1256
1257/* Determine whether to assign a location to the statement GS.  */
1258
1259static bool
1260should_carry_location_p (gimple gs)
1261{
1262  /* Don't emit a line note for a label.  We particularly don't want to
1263     emit one for the break label, since it doesn't actually correspond
1264     to the beginning of the loop/switch.  */
1265  if (gimple_code (gs) == GIMPLE_LABEL)
1266    return false;
1267
1268  return true;
1269}
1270
1271/* Set the location for gimple statement GS to LOCATION.  */
1272
1273static void
1274annotate_one_with_location (gimple gs, location_t location)
1275{
1276  if (!gimple_has_location (gs)
1277      && !gimple_do_not_emit_location_p (gs)
1278      && should_carry_location_p (gs))
1279    gimple_set_location (gs, location);
1280}
1281
1282/* Set LOCATION for all the statements after iterator GSI in sequence
1283   SEQ.  If GSI is pointing to the end of the sequence, start with the
1284   first statement in SEQ.  */
1285
1286void
1287annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1288				  location_t location)
1289{
1290  if (gsi_end_p (gsi))
1291    gsi = gsi_start (seq);
1292  else
1293    gsi_next (&gsi);
1294
1295  for (; !gsi_end_p (gsi); gsi_next (&gsi))
1296    annotate_one_with_location (gsi_stmt (gsi), location);
1297}
1298
1299/* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
1300
1301void
1302annotate_all_with_location (gimple_seq stmt_p, location_t location)
1303{
1304  gimple_stmt_iterator i;
1305
1306  if (gimple_seq_empty_p (stmt_p))
1307    return;
1308
1309  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1310    {
1311      gimple gs = gsi_stmt (i);
1312      annotate_one_with_location (gs, location);
1313    }
1314}
1315
1316/* Helper function of empty_body_p.  Return true if STMT is an empty
1317   statement.  */
1318
1319static bool
1320empty_stmt_p (gimple stmt)
1321{
1322  if (gimple_code (stmt) == GIMPLE_NOP)
1323    return true;
1324  if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
1325    return empty_body_p (gimple_bind_body (bind_stmt));
1326  return false;
1327}
1328
1329
1330/* Return true if BODY contains nothing but empty statements.  */
1331
1332bool
1333empty_body_p (gimple_seq body)
1334{
1335  gimple_stmt_iterator i;
1336
1337  if (gimple_seq_empty_p (body))
1338    return true;
1339  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1340    if (!empty_stmt_p (gsi_stmt (i))
1341	&& !is_gimple_debug (gsi_stmt (i)))
1342      return false;
1343
1344  return true;
1345}
1346
1347
1348/* Perform a deep copy of sequence SRC and return the result.  */
1349
1350gimple_seq
1351gimple_seq_copy (gimple_seq src)
1352{
1353  gimple_stmt_iterator gsi;
1354  gimple_seq new_seq = NULL;
1355  gimple stmt;
1356
1357  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1358    {
1359      stmt = gimple_copy (gsi_stmt (gsi));
1360      gimple_seq_add_stmt (&new_seq, stmt);
1361    }
1362
1363  return new_seq;
1364}
1365
1366
1367
1368/* Return true if calls C1 and C2 are known to go to the same function.  */
1369
1370bool
1371gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1372{
1373  if (gimple_call_internal_p (c1))
1374    return (gimple_call_internal_p (c2)
1375	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1376  else
1377    return (gimple_call_fn (c1) == gimple_call_fn (c2)
1378	    || (gimple_call_fndecl (c1)
1379		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1380}
1381
1382/* Detect flags from a GIMPLE_CALL.  This is just like
1383   call_expr_flags, but for gimple tuples.  */
1384
1385int
1386gimple_call_flags (const_gimple stmt)
1387{
1388  int flags;
1389  tree decl = gimple_call_fndecl (stmt);
1390
1391  if (decl)
1392    flags = flags_from_decl_or_type (decl);
1393  else if (gimple_call_internal_p (stmt))
1394    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1395  else
1396    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1397
1398  if (stmt->subcode & GF_CALL_NOTHROW)
1399    flags |= ECF_NOTHROW;
1400
1401  return flags;
1402}
1403
1404/* Return the "fn spec" string for call STMT.  */
1405
1406static const_tree
1407gimple_call_fnspec (const gcall *stmt)
1408{
1409  tree type, attr;
1410
1411  if (gimple_call_internal_p (stmt))
1412    return internal_fn_fnspec (gimple_call_internal_fn (stmt));
1413
1414  type = gimple_call_fntype (stmt);
1415  if (!type)
1416    return NULL_TREE;
1417
1418  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1419  if (!attr)
1420    return NULL_TREE;
1421
1422  return TREE_VALUE (TREE_VALUE (attr));
1423}
1424
1425/* Detects argument flags for argument number ARG on call STMT.  */
1426
1427int
1428gimple_call_arg_flags (const gcall *stmt, unsigned arg)
1429{
1430  const_tree attr = gimple_call_fnspec (stmt);
1431
1432  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1433    return 0;
1434
1435  switch (TREE_STRING_POINTER (attr)[1 + arg])
1436    {
1437    case 'x':
1438    case 'X':
1439      return EAF_UNUSED;
1440
1441    case 'R':
1442      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1443
1444    case 'r':
1445      return EAF_NOCLOBBER | EAF_NOESCAPE;
1446
1447    case 'W':
1448      return EAF_DIRECT | EAF_NOESCAPE;
1449
1450    case 'w':
1451      return EAF_NOESCAPE;
1452
1453    case '.':
1454    default:
1455      return 0;
1456    }
1457}
1458
1459/* Detects return flags for the call STMT.  */
1460
1461int
1462gimple_call_return_flags (const gcall *stmt)
1463{
1464  const_tree attr;
1465
1466  if (gimple_call_flags (stmt) & ECF_MALLOC)
1467    return ERF_NOALIAS;
1468
1469  attr = gimple_call_fnspec (stmt);
1470  if (!attr || TREE_STRING_LENGTH (attr) < 1)
1471    return 0;
1472
1473  switch (TREE_STRING_POINTER (attr)[0])
1474    {
1475    case '1':
1476    case '2':
1477    case '3':
1478    case '4':
1479      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1480
1481    case 'm':
1482      return ERF_NOALIAS;
1483
1484    case '.':
1485    default:
1486      return 0;
1487    }
1488}
1489
1490
1491/* Return true if GS is a copy assignment.  */
1492
1493bool
1494gimple_assign_copy_p (gimple gs)
1495{
1496  return (gimple_assign_single_p (gs)
1497	  && is_gimple_val (gimple_op (gs, 1)));
1498}
1499
1500
1501/* Return true if GS is a SSA_NAME copy assignment.  */
1502
1503bool
1504gimple_assign_ssa_name_copy_p (gimple gs)
1505{
1506  return (gimple_assign_single_p (gs)
1507	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1508	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1509}
1510
1511
1512/* Return true if GS is an assignment with a unary RHS, but the
1513   operator has no effect on the assigned value.  The logic is adapted
1514   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
1515   instances in which STRIP_NOPS was previously applied to the RHS of
1516   an assignment.
1517
1518   NOTE: In the use cases that led to the creation of this function
1519   and of gimple_assign_single_p, it is typical to test for either
1520   condition and to proceed in the same manner.  In each case, the
1521   assigned value is represented by the single RHS operand of the
1522   assignment.  I suspect there may be cases where gimple_assign_copy_p,
1523   gimple_assign_single_p, or equivalent logic is used where a similar
1524   treatment of unary NOPs is appropriate.  */
1525
1526bool
1527gimple_assign_unary_nop_p (gimple gs)
1528{
1529  return (is_gimple_assign (gs)
1530          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1531              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1532          && gimple_assign_rhs1 (gs) != error_mark_node
1533          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1534              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1535}
1536
1537/* Set BB to be the basic block holding G.  */
1538
void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->bb = bb;

  if (gimple_code (stmt) != GIMPLE_LABEL)
    return;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg)
    {
      tree t;
      int uid;

      t = gimple_label_label (as_a <glabel *> (stmt));
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label is seen: assign it a fresh UID and
	     grow the map with ~50% headroom if it is too small.  */
	  unsigned old_len =
	    vec_safe_length (label_to_block_map_for_fn (cfun));
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
				     new_len);
	    }
	}

      (*label_to_block_map_for_fn (cfun))[uid] = bb;
    }
}
1573
1574
1575/* Modify the RHS of the assignment pointed-to by GSI using the
1576   operands in the expression tree EXPR.
1577
1578   NOTE: The statement pointed-to by GSI may be reallocated if it
1579   did not have enough operand slots.
1580
1581   This function is useful to convert an existing tree expression into
1582   the flat representation used for the RHS of a GIMPLE assignment.
1583   It will reallocate memory as needed to expand or shrink the number
1584   of operand slots needed to represent EXPR.
1585
1586   NOTE: If you find yourself building a tree and then calling this
1587   function, you are most certainly doing it the slow way.  It is much
1588   better to build a new assignment or to use the function
1589   gimple_assign_set_rhs_with_ops, which does not require an
1590   expression tree to be built.  */
1591
1592void
1593gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1594{
1595  enum tree_code subcode;
1596  tree op1, op2, op3;
1597
1598  extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3);
1599  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3);
1600}
1601
1602
1603/* Set the RHS of assignment statement pointed-to by GSI to CODE with
1604   operands OP1, OP2 and OP3.
1605
1606   NOTE: The statement pointed-to by GSI may be reallocated if it
1607   did not have enough operand slots.  */
1608
void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      /* Allocate a statement with enough operand slots and copy the
	 old header over it, then swap it into the sequence at GSI.  */
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  /* Install the new code and operands; OP2/OP3 are only consulted
     when CODE actually takes that many RHS operands.  */
  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
1639
1640
1641/* Return the LHS of a statement that performs an assignment,
1642   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
1643   for a call to a function that returns no value, or for a
1644   statement other than an assignment or a call.  */
1645
1646tree
1647gimple_get_lhs (const_gimple stmt)
1648{
1649  enum gimple_code code = gimple_code (stmt);
1650
1651  if (code == GIMPLE_ASSIGN)
1652    return gimple_assign_lhs (stmt);
1653  else if (code == GIMPLE_CALL)
1654    return gimple_call_lhs (stmt);
1655  else
1656    return NULL_TREE;
1657}
1658
1659
1660/* Set the LHS of a statement that performs an assignment,
1661   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
1662
1663void
1664gimple_set_lhs (gimple stmt, tree lhs)
1665{
1666  enum gimple_code code = gimple_code (stmt);
1667
1668  if (code == GIMPLE_ASSIGN)
1669    gimple_assign_set_lhs (stmt, lhs);
1670  else if (code == GIMPLE_CALL)
1671    gimple_call_set_lhs (stmt, lhs);
1672  else
1673    gcc_unreachable ();
1674}
1675
1676
1677/* Return a deep copy of statement STMT.  All the operands from STMT
1678   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
1679   and VUSE operand arrays are set to empty in the new copy.  The new
1680   copy isn't part of any sequence.  */
1681
gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  The memcpy
     above left COPY sharing its sub-sequences and clause trees with
     STMT; each one is replaced below by its own deep copy.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  {
	    gbind *bind_stmt = as_a <gbind *> (stmt);
	    gbind *bind_copy = as_a <gbind *> (copy);
	    new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
	    gimple_bind_set_body (bind_copy, new_seq);
	    gimple_bind_set_vars (bind_copy,
				  unshare_expr (gimple_bind_vars (bind_stmt)));
	    gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
	  }
	  break;

	case GIMPLE_CATCH:
	  {
	    gcatch *catch_stmt = as_a <gcatch *> (stmt);
	    gcatch *catch_copy = as_a <gcatch *> (copy);
	    new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
	    gimple_catch_set_handler (catch_copy, new_seq);
	    t = unshare_expr (gimple_catch_types (catch_stmt));
	    gimple_catch_set_types (catch_copy, t);
	  }
	  break;

	case GIMPLE_EH_FILTER:
	  {
	    geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
	    geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
	    new_seq
	      = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
	    gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
	    t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
	    gimple_eh_filter_set_types (eh_filter_copy, t);
	  }
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	    geh_else *eh_else_copy = as_a <geh_else *> (copy);
	    new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
	    gimple_eh_else_set_n_body (eh_else_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
	    gimple_eh_else_set_e_body (eh_else_copy, new_seq);
	  }
	  break;

	case GIMPLE_TRY:
	  {
	    gtry *try_stmt = as_a <gtry *> (stmt);
	    gtry *try_copy = as_a <gtry *> (copy);
	    new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
	    gimple_try_set_eval (try_copy, new_seq);
	    new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
	    gimple_try_set_cleanup (try_copy, new_seq);
	  }
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  {
	    /* Give COPY its own iteration descriptor array before
	       filling it in below.  */
	    gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
	    omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
	      ( gimple_omp_for_collapse (stmt));
          }
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  {
	    gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	    gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
	    t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
	    gimple_omp_parallel_set_clauses (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
	    gimple_omp_parallel_set_child_fn (omp_par_copy, t);
	    t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
	    gimple_omp_parallel_set_data_arg (omp_par_copy, t);
	  }
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (
			      as_a <gomp_critical *> (stmt)));
	  gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU  */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_TARGET:
	case GIMPLE_OMP_TEAMS:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  /* Shared tail for all OMP statements: deep-copy the body.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (
				       as_a <gtransaction *> (stmt)));
	  gimple_transaction_set_body (as_a <gtransaction *> (copy),
				       new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  /* Virtual operands are shared, not unshared.  */
  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
1875
1876
1877/* Return true if statement S has side-effects.  We consider a
1878   statement to have side effects if:
1879
1880   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1881   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
1882
1883bool
1884gimple_has_side_effects (const_gimple s)
1885{
1886  if (is_gimple_debug (s))
1887    return false;
1888
1889  /* We don't have to scan the arguments to check for
1890     volatile arguments, though, at present, we still
1891     do a scan to check for TREE_SIDE_EFFECTS.  */
1892  if (gimple_has_volatile_ops (s))
1893    return true;
1894
1895  if (gimple_code (s) == GIMPLE_ASM
1896      && gimple_asm_volatile_p (as_a <const gasm *> (s)))
1897    return true;
1898
1899  if (is_gimple_call (s))
1900    {
1901      int flags = gimple_call_flags (s);
1902
1903      /* An infinite loop is considered a side effect.  */
1904      if (!(flags & (ECF_CONST | ECF_PURE))
1905	  || (flags & ECF_LOOPING_CONST_OR_PURE))
1906	return true;
1907
1908      return false;
1909    }
1910
1911  return false;
1912}
1913
1914/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
1915   Return true if S can trap.  When INCLUDE_MEM is true, check whether
1916   the memory operations could trap.  When INCLUDE_STORES is true and
1917   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */
1918
bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* Operand 0 of an assignment is its LHS; skip it unless stores
	 are to be considered too.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (as_a <gasm *> (s));

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* For binary operations pass the second operand along so
	 division by a nonzero constant can be proved trap-free.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    case GIMPLE_COND:
      t = TREE_TYPE (gimple_cond_lhs (s));
      return operation_could_trap_p (gimple_cond_code (s),
				     FLOAT_TYPE_P (t), false, NULL_TREE);

    default:
      break;
    }

  return false;
}
1967
1968/* Return true if statement S can trap.  */
1969
1970bool
1971gimple_could_trap_p (gimple s)
1972{
1973  return gimple_could_trap_p_1 (s, true, true);
1974}
1975
1976/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
1977
1978bool
1979gimple_assign_rhs_could_trap_p (gimple s)
1980{
1981  gcc_assert (is_gimple_assign (s));
1982  return gimple_could_trap_p_1 (s, true, false);
1983}
1984
1985
1986/* Print debugging information for gimple stmts generated.  */
1987
1988void
1989dump_gimple_statistics (void)
1990{
1991  int i, total_tuples = 0, total_bytes = 0;
1992
1993  if (! GATHER_STATISTICS)
1994    {
1995      fprintf (stderr, "No gimple statistics\n");
1996      return;
1997    }
1998
1999  fprintf (stderr, "\nGIMPLE statements\n");
2000  fprintf (stderr, "Kind                   Stmts      Bytes\n");
2001  fprintf (stderr, "---------------------------------------\n");
2002  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2003    {
2004      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2005	  gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2006      total_tuples += gimple_alloc_counts[i];
2007      total_bytes += gimple_alloc_sizes[i];
2008    }
2009  fprintf (stderr, "---------------------------------------\n");
2010  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2011  fprintf (stderr, "---------------------------------------\n");
2012}
2013
2014
2015/* Return the number of operands needed on the RHS of a GIMPLE
2016   assignment for an expression with tree code CODE.  */
2017
2018unsigned
2019get_gimple_rhs_num_ops (enum tree_code code)
2020{
2021  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2022
2023  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2024    return 1;
2025  else if (rhs_class == GIMPLE_BINARY_RHS)
2026    return 2;
2027  else if (rhs_class == GIMPLE_TERNARY_RHS)
2028    return 3;
2029  else
2030    gcc_unreachable ();
2031}
2032
/* Map every tree code to the gimple_rhs_class it uses on the RHS of a
   GIMPLE assignment.  The table is generated by expanding DEFTREECODE
   once per code in all-tree.def; codes with no GIMPLE RHS form map to
   GIMPLE_INVALID_RHS.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == SAD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

/* Indexed by enum tree_code.  */
const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
2069
2070/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
2071   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2072   we failed to create one.  */
2073
2074tree
2075canonicalize_cond_expr_cond (tree t)
2076{
2077  /* Strip conversions around boolean operations.  */
2078  if (CONVERT_EXPR_P (t)
2079      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2080          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2081	     == BOOLEAN_TYPE))
2082    t = TREE_OPERAND (t, 0);
2083
2084  /* For !x use x == 0.  */
2085  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2086    {
2087      tree top0 = TREE_OPERAND (t, 0);
2088      t = build2 (EQ_EXPR, TREE_TYPE (t),
2089		  top0, build_int_cst (TREE_TYPE (top0), 0));
2090    }
2091  /* For cmp ? 1 : 0 use cmp.  */
2092  else if (TREE_CODE (t) == COND_EXPR
2093	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2094	   && integer_onep (TREE_OPERAND (t, 1))
2095	   && integer_zerop (TREE_OPERAND (t, 2)))
2096    {
2097      tree top0 = TREE_OPERAND (t, 0);
2098      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2099		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2100    }
2101  /* For x ^ y use x != y.  */
2102  else if (TREE_CODE (t) == BIT_XOR_EXPR)
2103    t = build2 (NE_EXPR, TREE_TYPE (t),
2104		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2105
2106  if (is_gimple_condexpr (t))
2107    return t;
2108
2109  return NULL_TREE;
2110}
2111
2112/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2113   the positions marked by the set ARGS_TO_SKIP.  */
2114
2115gcall *
2116gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
2117{
2118  int i;
2119  int nargs = gimple_call_num_args (stmt);
2120  auto_vec<tree> vargs (nargs);
2121  gcall *new_stmt;
2122
2123  for (i = 0; i < nargs; i++)
2124    if (!bitmap_bit_p (args_to_skip, i))
2125      vargs.quick_push (gimple_call_arg (stmt, i));
2126
2127  if (gimple_call_internal_p (stmt))
2128    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2129					       vargs);
2130  else
2131    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2132
2133  if (gimple_call_lhs (stmt))
2134    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2135
2136  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2137  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2138
2139  if (gimple_has_location (stmt))
2140    gimple_set_location (new_stmt, gimple_location (stmt));
2141  gimple_call_copy_flags (new_stmt, stmt);
2142  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2143
2144  gimple_set_modified (new_stmt, true);
2145
2146  return new_stmt;
2147}
2148
2149
2150
2151/* Return true if the field decls F1 and F2 are at the same offset.
2152
2153   This is intended to be used on GIMPLE types only.  */
2154
bool
gimple_compare_field_offset (tree f1, tree f2)
{
  /* When both fields agree on their offset alignment, their byte and
     bit offsets can be compared directly.  */
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      /* Fold whole bytes of each bit offset into the byte offset so the
	 comparison is independent of DECL_OFFSET_ALIGN.  */
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  /* Non-constant offsets with differing alignment: treat the fields
     as different.  */
  return false;
}
2201
2202
2203/* Return a type the same as TYPE except unsigned or
2204   signed according to UNSIGNEDP.  */
2205
static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;
  int i;

  /* First try to match the main variant of TYPE against the standard C
     integer type nodes and return the other-signedness twin directly.  */
  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
           ? long_long_unsigned_type_node
	   : long_long_integer_type_node;

  /* Extended __intN types (e.g. __int128) that the target enables.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& (type1 == int_n_trees[i].unsigned_type
	    || type1 == int_n_trees[i].signed_type))
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

  /* Mode-named integer type nodes (QI/HI/SI/DI/TI).  */
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* Helper macros covering the fixed-point type nodes (fract/accum in
     all their short/long/long-long, saturating and mode-named forms).  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* TYPE was not one of the well-known nodes; fall back to matching by
     mode and precision against the standard nodes.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (int_n_enabled_p[i]
	&& TYPE_MODE (type) == int_n_data[i].m
	&& TYPE_PRECISION (type) == int_n_data[i].bitsize)
	return unsignedp
	  ? int_n_trees[i].unsigned_type
	  : int_n_trees[i].signed_type;

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* Last resort: build a fresh integer type of matching precision.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2379
2380
2381/* Return an unsigned type the same as TYPE in other respects.  */
2382
2383tree
2384gimple_unsigned_type (tree type)
2385{
2386  return gimple_signed_or_unsigned_type (true, type);
2387}
2388
2389
2390/* Return a signed type the same as TYPE in other respects.  */
2391
2392tree
2393gimple_signed_type (tree type)
2394{
2395  return gimple_signed_or_unsigned_type (false, type);
2396}
2397
2398
2399/* Return the typed-based alias set for T, which may be an expression
2400   or a type.  Return -1 if we don't do anything special.  */
2401
2402alias_set_type
2403gimple_get_alias_set (tree t)
2404{
2405  tree u;
2406
2407  /* Permit type-punning when accessing a union, provided the access
2408     is directly through the union.  For example, this code does not
2409     permit taking the address of a union member and then storing
2410     through it.  Even the type-punning allowed here is a GCC
2411     extension, albeit a common and useful one; the C standard says
2412     that such accesses have implementation-defined behavior.  */
2413  for (u = t;
2414       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2415       u = TREE_OPERAND (u, 0))
2416    if (TREE_CODE (u) == COMPONENT_REF
2417	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2418      return 0;
2419
2420  /* That's all the expressions we handle specially.  */
2421  if (!TYPE_P (t))
2422    return -1;
2423
2424  /* For convenience, follow the C standard when dealing with
2425     character types.  Any object may be accessed via an lvalue that
2426     has character type.  */
2427  if (t == char_type_node
2428      || t == signed_char_type_node
2429      || t == unsigned_char_type_node)
2430    return 0;
2431
2432  /* Allow aliasing between signed and unsigned variants of the same
2433     type.  We treat the signed variant as canonical.  */
2434  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2435    {
2436      tree t1 = gimple_signed_type (t);
2437
2438      /* t1 == t can happen for boolean nodes which are always unsigned.  */
2439      if (t1 != t)
2440	return get_alias_set (t1);
2441    }
2442
2443  return -1;
2444}
2445
2446
2447/* Helper for gimple_ior_addresses_taken_1.  */
2448
2449static bool
2450gimple_ior_addresses_taken_1 (gimple, tree addr, tree, void *data)
2451{
2452  bitmap addresses_taken = (bitmap)data;
2453  addr = get_base_address (addr);
2454  if (addr
2455      && DECL_P (addr))
2456    {
2457      bitmap_set_bit (addresses_taken, DECL_UID (addr));
2458      return true;
2459    }
2460  return false;
2461}
2462
2463/* Set the bit for the uid of all decls that have their address taken
2464   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
2465   were any in this stmt.  */
2466
2467bool
2468gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
2469{
2470  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2471					gimple_ior_addresses_taken_1);
2472}
2473
2474
2475/* Return true if TYPE1 and TYPE2 are compatible enough for builtin
2476   processing.  */
2477
2478static bool
2479validate_type (tree type1, tree type2)
2480{
2481  if (INTEGRAL_TYPE_P (type1)
2482      && INTEGRAL_TYPE_P (type2))
2483    ;
2484  else if (POINTER_TYPE_P (type1)
2485	   && POINTER_TYPE_P (type2))
2486    ;
2487  else if (TREE_CODE (type1)
2488	   != TREE_CODE (type2))
2489    return false;
2490  return true;
2491}
2492
2493/* Return true when STMTs arguments and return value match those of FNDECL,
2494   a decl of a builtin function.  */
2495
2496bool
2497gimple_builtin_call_types_compatible_p (const_gimple stmt, tree fndecl)
2498{
2499  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
2500
2501  tree ret = gimple_call_lhs (stmt);
2502  if (ret
2503      && !validate_type (TREE_TYPE (ret), TREE_TYPE (TREE_TYPE (fndecl))))
2504    return false;
2505
2506  tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2507  unsigned nargs = gimple_call_num_args (stmt);
2508  for (unsigned i = 0; i < nargs; ++i)
2509    {
2510      /* Variadic args follow.  */
2511      if (!targs)
2512	return true;
2513      tree arg = gimple_call_arg (stmt, i);
2514      if (!validate_type (TREE_TYPE (arg), TREE_VALUE (targs)))
2515	return false;
2516      targs = TREE_CHAIN (targs);
2517    }
2518  if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2519    return false;
2520  return true;
2521}
2522
/* Return true when STMT is a call to a built-in function.  */
2524
2525bool
2526gimple_call_builtin_p (const_gimple stmt)
2527{
2528  tree fndecl;
2529  if (is_gimple_call (stmt)
2530      && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2531      && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2532    return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2533  return false;
2534}
2535
/* Return true when STMT is a call to a built-in function of class KLASS.  */
2537
2538bool
2539gimple_call_builtin_p (const_gimple stmt, enum built_in_class klass)
2540{
2541  tree fndecl;
2542  if (is_gimple_call (stmt)
2543      && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2544      && DECL_BUILT_IN_CLASS (fndecl) == klass)
2545    return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2546  return false;
2547}
2548
/* Return true when STMT is a call to the normal built-in function CODE.  */
2550
2551bool
2552gimple_call_builtin_p (const_gimple stmt, enum built_in_function code)
2553{
2554  tree fndecl;
2555  if (is_gimple_call (stmt)
2556      && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2557      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2558      && DECL_FUNCTION_CODE (fndecl) == code)
2559    return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2560  return false;
2561}
2562
2563/* Return true if STMT clobbers memory.  STMT is required to be a
2564   GIMPLE_ASM.  */
2565
2566bool
2567gimple_asm_clobbers_memory_p (const gasm *stmt)
2568{
2569  unsigned i;
2570
2571  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2572    {
2573      tree op = gimple_asm_clobber_op (stmt, i);
2574      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2575	return true;
2576    }
2577
2578  return false;
2579}
2580
2581/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */
2582
2583void
2584dump_decl_set (FILE *file, bitmap set)
2585{
2586  if (set)
2587    {
2588      bitmap_iterator bi;
2589      unsigned i;
2590
2591      fprintf (file, "{ ");
2592
2593      EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2594	{
2595	  fprintf (file, "D.%u", i);
2596	  fprintf (file, " ");
2597	}
2598
2599      fprintf (file, "}");
2600    }
2601  else
2602    fprintf (file, "NIL");
2603}
2604
2605/* Return true when CALL is a call stmt that definitely doesn't
2606   free any memory or makes it unavailable otherwise.  */
bool
nonfreeing_call_p (gimple call)
{
  /* Leaf normal builtins: everything except the known freeing/
     reallocating ones is safe.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* Just in case these become ECF_LEAF in the future.  */
	case BUILT_IN_FREE:
	case BUILT_IN_TM_FREE:
	case BUILT_IN_REALLOC:
	case BUILT_IN_STACK_RESTORE:
	  return false;
	default:
	  return true;
      }
  else if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      case IFN_ABNORMAL_DISPATCHER:
        return true;
      default:
	if (gimple_call_flags (call) & ECF_LEAF)
	  return true;
	return false;
      }

  /* Otherwise consult the callgraph's knowledge about the callee;
     note control falls through here for non-leaf normal builtins.  */
  tree fndecl = gimple_call_fndecl (call);
  if (!fndecl)
    return false;
  struct cgraph_node *n = cgraph_node::get (fndecl);
  if (!n)
    return false;
  enum availability availability;
  n = n->function_symbol (&availability);
  /* An interposable definition could be replaced by one that frees.  */
  if (!n || availability <= AVAIL_INTERPOSABLE)
    return false;
  return n->nonfreeing_fn;
}
2646
2647/* Callback for walk_stmt_load_store_ops.
2648
2649   Return TRUE if OP will dereference the tree stored in DATA, FALSE
2650   otherwise.
2651
2652   This routine only makes a superficial check for a dereference.  Thus
2653   it must only be used if it is safe to return a false negative.  */
2654static bool
2655check_loadstore (gimple, tree op, tree, void *data)
2656{
2657  if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2658      && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
2659    return true;
2660  return false;
2661}
2662
2663/* If OP can be inferred to be non-NULL after STMT executes, return true.
2664
2665   DEREFERENCE is TRUE if we can use a pointer dereference to infer a
2666   non-NULL range, FALSE otherwise.
2667
2668   ATTRIBUTE is TRUE if we can use attributes to infer a non-NULL range
2669   for function arguments and return values.  FALSE otherwise.  */
2670
2671bool
2672infer_nonnull_range (gimple stmt, tree op, bool dereference, bool attribute)
2673{
2674  /* We can only assume that a pointer dereference will yield
2675     non-NULL if -fdelete-null-pointer-checks is enabled.  */
2676  if (!flag_delete_null_pointer_checks
2677      || !POINTER_TYPE_P (TREE_TYPE (op))
2678      || gimple_code (stmt) == GIMPLE_ASM)
2679    return false;
2680
2681  if (dereference
2682      && walk_stmt_load_store_ops (stmt, (void *)op,
2683				   check_loadstore, check_loadstore))
2684    return true;
2685
2686  if (attribute
2687      && is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2688    {
2689      tree fntype = gimple_call_fntype (stmt);
2690      tree attrs = TYPE_ATTRIBUTES (fntype);
2691      for (; attrs; attrs = TREE_CHAIN (attrs))
2692	{
2693	  attrs = lookup_attribute ("nonnull", attrs);
2694
2695	  /* If "nonnull" wasn't specified, we know nothing about
2696	     the argument.  */
2697	  if (attrs == NULL_TREE)
2698	    return false;
2699
2700	  /* If "nonnull" applies to all the arguments, then ARG
2701	     is non-null if it's in the argument list.  */
2702	  if (TREE_VALUE (attrs) == NULL_TREE)
2703	    {
2704	      for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2705		{
2706		  if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
2707		      && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
2708		    return true;
2709		}
2710	      return false;
2711	    }
2712
2713	  /* Now see if op appears in the nonnull list.  */
2714	  for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2715	    {
2716	      int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2717	      tree arg = gimple_call_arg (stmt, idx);
2718	      if (operand_equal_p (op, arg, 0))
2719		return true;
2720	    }
2721	}
2722    }
2723
2724  /* If this function is marked as returning non-null, then we can
2725     infer OP is non-null if it is used in the return statement.  */
2726  if (attribute)
2727    if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2728      if (gimple_return_retval (return_stmt)
2729	  && operand_equal_p (gimple_return_retval (return_stmt), op, 0)
2730	  && lookup_attribute ("returns_nonnull",
2731			       TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2732	return true;
2733
2734  return false;
2735}
2736
2737/* Compare two case labels.  Because the front end should already have
2738   made sure that case ranges do not overlap, it is enough to only compare
2739   the CASE_LOW values of each case label.  */
2740
2741static int
2742compare_case_labels (const void *p1, const void *p2)
2743{
2744  const_tree const case1 = *(const_tree const*)p1;
2745  const_tree const case2 = *(const_tree const*)p2;
2746
2747  /* The 'default' case label always goes first.  */
2748  if (!CASE_LOW (case1))
2749    return -1;
2750  else if (!CASE_LOW (case2))
2751    return 1;
2752  else
2753    return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2754}
2755
2756/* Sort the case labels in LABEL_VEC in place in ascending order.  */
2757
2758void
2759sort_case_labels (vec<tree> label_vec)
2760{
2761  label_vec.qsort (compare_case_labels);
2762}
2763
2764/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
2765
2766   LABELS is a vector that contains all case labels to look at.
2767
2768   INDEX_TYPE is the type of the switch index expression.  Case labels
2769   in LABELS are discarded if their values are not in the value range
2770   covered by INDEX_TYPE.  The remaining case label values are folded
2771   to INDEX_TYPE.
2772
2773   If a default case exists in LABELS, it is removed from LABELS and
2774   returned in DEFAULT_CASEP.  If no default case exists, but the
2775   case labels already cover the whole range of INDEX_TYPE, a default
2776   case is returned pointing to one of the existing case labels.
2777   Otherwise DEFAULT_CASEP is set to NULL_TREE.
2778
2779   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
2780   apply and no action is taken regardless of whether a default case is
2781   found or not.  */
2782
2783void
2784preprocess_case_label_vec_for_gimple (vec<tree> labels,
2785				      tree index_type,
2786				      tree *default_casep)
2787{
2788  tree min_value, max_value;
2789  tree default_case = NULL_TREE;
2790  size_t i, len;
2791
2792  i = 0;
2793  min_value = TYPE_MIN_VALUE (index_type);
2794  max_value = TYPE_MAX_VALUE (index_type);
2795  while (i < labels.length ())
2796    {
2797      tree elt = labels[i];
2798      tree low = CASE_LOW (elt);
2799      tree high = CASE_HIGH (elt);
2800      bool remove_element = FALSE;
2801
2802      if (low)
2803	{
2804	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
2805	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
2806
2807	  /* This is a non-default case label, i.e. it has a value.
2808
2809	     See if the case label is reachable within the range of
2810	     the index type.  Remove out-of-range case values.  Turn
2811	     case ranges into a canonical form (high > low strictly)
2812	     and convert the case label values to the index type.
2813
2814	     NB: The type of gimple_switch_index() may be the promoted
2815	     type, but the case labels retain the original type.  */
2816
2817	  if (high)
2818	    {
2819	      /* This is a case range.  Discard empty ranges.
2820		 If the bounds or the range are equal, turn this
2821		 into a simple (one-value) case.  */
2822	      int cmp = tree_int_cst_compare (high, low);
2823	      if (cmp < 0)
2824		remove_element = TRUE;
2825	      else if (cmp == 0)
2826		high = NULL_TREE;
2827	    }
2828
2829	  if (! high)
2830	    {
2831	      /* If the simple case value is unreachable, ignore it.  */
2832	      if ((TREE_CODE (min_value) == INTEGER_CST
2833		   && tree_int_cst_compare (low, min_value) < 0)
2834		  || (TREE_CODE (max_value) == INTEGER_CST
2835		      && tree_int_cst_compare (low, max_value) > 0))
2836		remove_element = TRUE;
2837	      else
2838		low = fold_convert (index_type, low);
2839	    }
2840	  else
2841	    {
2842	      /* If the entire case range is unreachable, ignore it.  */
2843	      if ((TREE_CODE (min_value) == INTEGER_CST
2844		   && tree_int_cst_compare (high, min_value) < 0)
2845		  || (TREE_CODE (max_value) == INTEGER_CST
2846		      && tree_int_cst_compare (low, max_value) > 0))
2847		remove_element = TRUE;
2848	      else
2849		{
2850		  /* If the lower bound is less than the index type's
2851		     minimum value, truncate the range bounds.  */
2852		  if (TREE_CODE (min_value) == INTEGER_CST
2853		      && tree_int_cst_compare (low, min_value) < 0)
2854		    low = min_value;
2855		  low = fold_convert (index_type, low);
2856
2857		  /* If the upper bound is greater than the index type's
2858		     maximum value, truncate the range bounds.  */
2859		  if (TREE_CODE (max_value) == INTEGER_CST
2860		      && tree_int_cst_compare (high, max_value) > 0)
2861		    high = max_value;
2862		  high = fold_convert (index_type, high);
2863
2864		  /* We may have folded a case range to a one-value case.  */
2865		  if (tree_int_cst_equal (low, high))
2866		    high = NULL_TREE;
2867		}
2868	    }
2869
2870	  CASE_LOW (elt) = low;
2871	  CASE_HIGH (elt) = high;
2872	}
2873      else
2874	{
2875	  gcc_assert (!default_case);
2876	  default_case = elt;
2877	  /* The default case must be passed separately to the
2878	     gimple_build_switch routine.  But if DEFAULT_CASEP
2879	     is NULL, we do not remove the default case (it would
2880	     be completely lost).  */
2881	  if (default_casep)
2882	    remove_element = TRUE;
2883	}
2884
2885      if (remove_element)
2886	labels.ordered_remove (i);
2887      else
2888	i++;
2889    }
2890  len = i;
2891
2892  if (!labels.is_empty ())
2893    sort_case_labels (labels);
2894
2895  if (default_casep && !default_case)
2896    {
2897      /* If the switch has no default label, add one, so that we jump
2898	 around the switch body.  If the labels already cover the whole
2899	 range of the switch index_type, add the default label pointing
2900	 to one of the existing labels.  */
2901      if (len
2902	  && TYPE_MIN_VALUE (index_type)
2903	  && TYPE_MAX_VALUE (index_type)
2904	  && tree_int_cst_equal (CASE_LOW (labels[0]),
2905				 TYPE_MIN_VALUE (index_type)))
2906	{
2907	  tree low, high = CASE_HIGH (labels[len - 1]);
2908	  if (!high)
2909	    high = CASE_LOW (labels[len - 1]);
2910	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
2911	    {
2912	      for (i = 1; i < len; i++)
2913		{
2914		  high = CASE_LOW (labels[i]);
2915		  low = CASE_HIGH (labels[i - 1]);
2916		  if (!low)
2917		    low = CASE_LOW (labels[i - 1]);
2918		  if (wi::add (low, 1) != high)
2919		    break;
2920		}
2921	      if (i == len)
2922		{
2923		  tree label = CASE_LABEL (labels[0]);
2924		  default_case = build_case_label (NULL_TREE, NULL_TREE,
2925						   label);
2926		}
2927	    }
2928	}
2929    }
2930
2931  if (default_casep)
2932    *default_casep = default_case;
2933}
2934
2935/* Set the location of all statements in SEQ to LOC.  */
2936
2937void
2938gimple_seq_set_location (gimple_seq seq, location_t loc)
2939{
2940  for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2941    gimple_set_location (gsi_stmt (i), loc);
2942}
2943
2944/* Release SSA_NAMEs in SEQ as well as the GIMPLE statements.  */
2945
2946void
2947gimple_seq_discard (gimple_seq seq)
2948{
2949  gimple_stmt_iterator gsi;
2950
2951  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
2952    {
2953      gimple stmt = gsi_stmt (gsi);
2954      gsi_remove (&gsi, true);
2955      release_defs (stmt);
2956      ggc_free (stmt);
2957    }
2958}
2959
/* See if STMT now calls a function that takes no parameters and if so, drop
   the call arguments.  This is used when devirtualization machinery redirects
   to __builtin_unreachable or __cxa_pure_virtual.  */
2963
2964void
2965maybe_remove_unused_call_args (struct function *fn, gimple stmt)
2966{
2967  tree decl = gimple_call_fndecl (stmt);
2968  if (TYPE_ARG_TYPES (TREE_TYPE (decl))
2969      && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node
2970      && gimple_call_num_args (stmt))
2971    {
2972      gimple_set_num_ops (stmt, 3);
2973      update_stmt_fn (fn, stmt);
2974    }
2975}
2976