/* Optimize and expand sanitizer functions.
   Copyright (C) 2014-2015 Free Software Foundation, Inc.
   Contributed by Marek Polacek <polacek@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hash-table.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "tm_p.h"
#include "langhooks.h"
#include "ubsan.h"
#include "params.h"
#include "tree-ssa-operands.h"


/* This is used to carry information about basic blocks.  It is
   attached to the AUX field of the standard CFG block.  */

struct sanopt_info
{
  /* True if this BB might call (directly or indirectly) free/munmap
     or a similar operation.  */
  bool has_freeing_call_p;

  /* True if HAS_FREEING_CALL_P flag has been computed.  */
  bool has_freeing_call_computed_p;

  /* True if there is a block with HAS_FREEING_CALL_P flag set
     on any path between an immediate dominator of BB, denoted
     imm(BB), and BB.  */
  bool imm_dom_path_with_freeing_call_p;

  /* True if IMM_DOM_PATH_WITH_FREEING_CALL_P has been computed.  */
  bool imm_dom_path_with_freeing_call_computed_p;

  /* Number of possibly freeing calls encountered in this bb
     (so far).  */
  uint64_t freeing_call_events;

  /* True if BB is currently being visited during computation
     of IMM_DOM_PATH_WITH_FREEING_CALL_P flag.  */
  bool being_visited_p;

  /* True if this BB has been visited in the dominator walk.  */
  bool visited_p;
};

/* If T has a single definition of form T = T2, return T2.  */
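
/* For instance (an illustrative GIMPLE fragment, not taken from a real
   dump), given

     ptr_2 = &buf[i_1];
     ASAN_CHECK (flags, ptr_2, 4, 4);

   maybe_get_single_definition (ptr_2) returns &buf[i_1]; the ASAN_CHECK
   bookkeeping below uses this to track checks against the underlying
   address as well as against the SSA name itself.  */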

static tree
maybe_get_single_definition (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      gimple g = SSA_NAME_DEF_STMT (t);
      if (gimple_assign_single_p (g))
	return gimple_assign_rhs1 (g);
    }
  return NULL_TREE;
}

/* Traits class for tree hash maps below.  */

struct sanopt_tree_map_traits : default_hashmap_traits
{
  static inline hashval_t hash (const_tree ref)
  {
    return iterative_hash_expr (ref, 0);
  }

  static inline bool equal_keys (const_tree ref1, const_tree ref2)
  {
    return operand_equal_p (ref1, ref2, 0);
  }
};

/* Tree triplet for vptr_check_map.  */
struct sanopt_tree_triplet
{
  tree t1, t2, t3;
};

/* Traits class for tree triplet hash maps below.  */

struct sanopt_tree_triplet_map_traits : default_hashmap_traits
{
  static inline hashval_t
  hash (const sanopt_tree_triplet &ref)
  {
    inchash::hash hstate (0);
    inchash::add_expr (ref.t1, hstate);
    inchash::add_expr (ref.t2, hstate);
    inchash::add_expr (ref.t3, hstate);
    return hstate.end ();
  }

  static inline bool
  equal_keys (const sanopt_tree_triplet &ref1, const sanopt_tree_triplet &ref2)
  {
    return operand_equal_p (ref1.t1, ref2.t1, 0)
	   && operand_equal_p (ref1.t2, ref2.t2, 0)
	   && operand_equal_p (ref1.t3, ref2.t3, 0);
  }

  template<typename T>
  static inline void
  mark_deleted (T &e)
  {
    e.m_key.t1 = reinterpret_cast<tree> (1);
  }

  template<typename T>
  static inline void
  mark_empty (T &e)
  {
    e.m_key.t1 = NULL;
  }

  template<typename T>
  static inline bool
  is_deleted (T &e)
  {
    return e.m_key.t1 == (void *) 1;
  }

  template<typename T>
  static inline bool
  is_empty (T &e)
  {
    return e.m_key.t1 == NULL;
  }
};

/* This is used to carry various hash maps and variables used
   in sanopt_optimize_walker.  */

struct sanopt_ctx
{
  /* This map maps a pointer (the first argument of UBSAN_NULL) to
     a vector of UBSAN_NULL call statements that check this pointer.  */
  hash_map<tree, auto_vec<gimple> > null_check_map;

  /* This map maps a pointer (the second argument of ASAN_CHECK) to
     a vector of ASAN_CHECK call statements that check the access.  */
  hash_map<tree, auto_vec<gimple>, sanopt_tree_map_traits> asan_check_map;

  /* This map maps a tree triplet (the first, second and fourth argument
     of UBSAN_VPTR) to a vector of UBSAN_VPTR call statements that check
     that virtual table pointer.  */
  hash_map<sanopt_tree_triplet, auto_vec<gimple>,
	   sanopt_tree_triplet_map_traits> vptr_check_map;

  /* Number of IFN_ASAN_CHECK statements.  */
  int asan_num_accesses;
};


/* Return true if there might be any call to free/munmap or a similar
   operation on any path in between DOM (which should be imm(BB)) and BB.  */
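
/* A minimal illustration (hypothetical diamond-shaped CFG): DOM branches
   to BB1 and BB2, both of which fall through to BB, and DOM == imm(BB).
   The function then answers whether BB1, BB2 or any other block on some
   DOM->BB path (excluding DOM itself) might contain a freeing call.  The
   results are cached in the sanopt_info flags, so each block is scanned
   at most once.  */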

static bool
imm_dom_path_with_freeing_call (basic_block bb, basic_block dom)
{
  sanopt_info *info = (sanopt_info *) bb->aux;
  edge e;
  edge_iterator ei;

  if (info->imm_dom_path_with_freeing_call_computed_p)
    return info->imm_dom_path_with_freeing_call_p;

  info->being_visited_p = true;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if ((pred_info->imm_dom_path_with_freeing_call_computed_p
	  && pred_info->imm_dom_path_with_freeing_call_p)
	  || (pred_info->has_freeing_call_computed_p
	      && pred_info->has_freeing_call_p))
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      sanopt_info *pred_info = (sanopt_info *) e->src->aux;

      if (e->src == dom)
	continue;

      if (pred_info->has_freeing_call_computed_p)
	continue;

      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (e->src); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
	    {
	      pred_info->has_freeing_call_p = true;
	      break;
	    }
	}

      pred_info->has_freeing_call_computed_p = true;
      if (pred_info->has_freeing_call_p)
	{
	  info->imm_dom_path_with_freeing_call_computed_p = true;
	  info->imm_dom_path_with_freeing_call_p = true;
	  info->being_visited_p = false;
	  return true;
	}
    }

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->src == dom)
	continue;

      basic_block src;
      for (src = e->src; src != dom; )
	{
	  sanopt_info *pred_info = (sanopt_info *) src->aux;
	  if (pred_info->being_visited_p)
	    break;
	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, src);
	  if (imm_dom_path_with_freeing_call (src, imm))
	    {
	      info->imm_dom_path_with_freeing_call_computed_p = true;
	      info->imm_dom_path_with_freeing_call_p = true;
	      info->being_visited_p = false;
	      return true;
	    }
	  src = imm;
	}
    }

  info->imm_dom_path_with_freeing_call_computed_p = true;
  info->imm_dom_path_with_freeing_call_p = false;
  info->being_visited_p = false;
  return false;
}

/* Get the first dominating check from the list of stored checks.
   Non-dominating checks are silently dropped.  */

static gimple
maybe_get_dominating_check (auto_vec<gimple> &v)
{
  for (; !v.is_empty (); v.pop ())
    {
      gimple g = v.last ();
      sanopt_info *si = (sanopt_info *) gimple_bb (g)->aux;
      if (!si->visited_p)
	/* At this point we shouldn't have any statements
	   that aren't dominating the current BB.  */
	return g;
    }
  return NULL;
}

/* Optimize away redundant UBSAN_NULL calls.  */
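
/* As an illustrative example (GIMPLE shown schematically, with the check
   kind spelled symbolically), a function that dereferences the same
   pointer twice when compiled with -fsanitize=null contains

     UBSAN_NULL (p_1, UBSAN_LOAD_OF, 0);
     _2 = *p_1;
     UBSAN_NULL (p_1, UBSAN_LOAD_OF, 0);
     _3 = *p_1;

   and the second, dominated check can usually be dropped below.  */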

static bool
maybe_optimize_ubsan_null_ifn (struct sanopt_ctx *ctx, gimple stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree cur_align = gimple_call_arg (stmt, 2);
  gcc_assert (TREE_CODE (cur_align) == INTEGER_CST);
  bool remove = false;

  auto_vec<gimple> &v = ctx->null_check_map.get_or_insert (ptr);
  gimple g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_NULL stmts recorded, so there's
	 nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  /* We have already recorded a UBSAN_NULL check for this pointer.  Perhaps
     we can drop this one, but only if this check doesn't specify stricter
     alignment.  */

  tree align = gimple_call_arg (g, 2);
  int kind = tree_to_shwi (gimple_call_arg (g, 1));
  /* If the dominating check was a plain NULL pointer check guarding an
     actual load, store or member access, a NULL pointer would have caused
     a segv there anyway, so this check can be removed.  */
  if (integer_zerop (align)
      && (kind == UBSAN_LOAD_OF
	  || kind == UBSAN_STORE_OF
	  || kind == UBSAN_MEMBER_ACCESS))
    remove = true;
  /* Otherwise remove the check in non-recovering mode, or if the
     stmts have the same location.  */
  else if (integer_zerop (align))
    remove = (flag_sanitize_recover & SANITIZE_NULL) == 0
	      || flag_sanitize_undefined_trap_on_error
	      || gimple_location (g) == gimple_location (stmt);
  else if (tree_int_cst_le (cur_align, align))
    remove = (flag_sanitize_recover & SANITIZE_ALIGNMENT) == 0
	      || flag_sanitize_undefined_trap_on_error
	      || gimple_location (g) == gimple_location (stmt);

  if (!remove && gimple_bb (g) == gimple_bb (stmt)
      && tree_int_cst_compare (cur_align, align) == 0)
    v.pop ();

  if (!remove)
    v.safe_push (stmt);
  return remove;
}

/* Optimize away redundant UBSAN_VPTR calls.  The second argument
   is the value loaded from the virtual table, so we rely on FRE to find
   out when we can actually optimize.  */
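
/* Schematically (argument positions as used for the triplet below:
   operands 0, 1 and 3; the remaining operands shown only symbolically):

     UBSAN_VPTR (p_1, vptr_2, hash_3, &TI, kind);
     ...
     UBSAN_VPTR (p_1, vptr_2, hash_4, &TI, kind);

   Only once FRE has CSEd the two vptr loads into the same SSA name do the
   triplets compare equal, and then the dominated check is removed.  */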

static bool
maybe_optimize_ubsan_vptr_ifn (struct sanopt_ctx *ctx, gimple stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 5);
  sanopt_tree_triplet triplet;
  triplet.t1 = gimple_call_arg (stmt, 0);
  triplet.t2 = gimple_call_arg (stmt, 1);
  triplet.t3 = gimple_call_arg (stmt, 3);

  auto_vec<gimple> &v = ctx->vptr_check_map.get_or_insert (triplet);
  gimple g = maybe_get_dominating_check (v);
  if (!g)
    {
      /* For this PTR we don't have any UBSAN_VPTR stmts recorded, so there's
	 nothing to optimize yet.  */
      v.safe_push (stmt);
      return false;
    }

  return true;
}

/* Return TRUE if an ASan check of length LEN in block BB can be removed
   when it is preceded by the checks in V.  */
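
/* For example (illustratively), a dominating 8-byte ASAN_CHECK of the same
   address makes a later 4-byte check redundant, but only if no
   free/munmap-like call can occur on any path in between: freed and
   re-poisoned memory would make the earlier check meaningless for the
   later access.  The freeing_call_events counters and
   imm_dom_path_with_freeing_call are what rule such paths out.  */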

static bool
can_remove_asan_check (auto_vec<gimple> &v, tree len, basic_block bb)
{
  unsigned int i;
  gimple g;
  gimple to_pop = NULL;
  bool remove = false;
  basic_block last_bb = bb;
  bool cleanup = false;

  FOR_EACH_VEC_ELT_REVERSE (v, i, g)
    {
      basic_block gbb = gimple_bb (g);
      sanopt_info *si = (sanopt_info *) gbb->aux;
      if (gimple_uid (g) < si->freeing_call_events)
	{
	  /* If there is a potentially freeing call after g in gbb, we should
	     remove it from the vector; it can't be used in the optimization.  */
	  cleanup = true;
	  continue;
	}

      tree glen = gimple_call_arg (g, 2);
      gcc_assert (TREE_CODE (glen) == INTEGER_CST);

      /* If we've only checked a smaller length than we want to check now,
	 we can't remove the current stmt.  If g is in the same basic block,
	 we want to remove g though, as the current stmt is better.  */
      if (tree_int_cst_lt (glen, len))
	{
	  if (gbb == bb)
	    {
	      to_pop = g;
	      cleanup = true;
	    }
	  continue;
	}

      while (last_bb != gbb)
	{
	  /* Paths from last_bb to bb have been checked before.
	     gbb is necessarily a dominator of last_bb, but not necessarily
	     its immediate dominator.  */
	  if (((sanopt_info *) last_bb->aux)->freeing_call_events)
	    break;

	  basic_block imm = get_immediate_dominator (CDI_DOMINATORS, last_bb);
	  gcc_assert (imm);
	  if (imm_dom_path_with_freeing_call (last_bb, imm))
	    break;

	  last_bb = imm;
	}
      if (last_bb == gbb)
	remove = true;
      break;
    }

  if (cleanup)
    {
      unsigned int j = 0, l = v.length ();
      for (i = 0; i < l; i++)
	if (v[i] != to_pop
	    && (gimple_uid (v[i])
		== ((sanopt_info *)
		    gimple_bb (v[i])->aux)->freeing_call_events))
	  {
	    if (i != j)
	      v[j] = v[i];
	    j++;
	  }
      v.truncate (j);
    }

  return remove;
}

/* Optimize away redundant ASAN_CHECK calls.  */
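
/* Illustrative GIMPLE (the first operand encodes the access flags and is
   shown here only symbolically):

     ASAN_CHECK (flags, p_1, 4, 4);
     _2 = *p_1;
     ASAN_CHECK (flags, p_1, 4, 4);
     _3 = *p_1;

   Absent an intervening freeing call, the second check is dominated by an
   equal-or-wider check of the same address and is removed.  */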

static bool
maybe_optimize_asan_check_ifn (struct sanopt_ctx *ctx, gimple stmt)
{
  gcc_assert (gimple_call_num_args (stmt) == 4);
  tree ptr = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  basic_block bb = gimple_bb (stmt);
  sanopt_info *info = (sanopt_info *) bb->aux;

  if (TREE_CODE (len) != INTEGER_CST)
    return false;
  if (integer_zerop (len))
    return false;

  gimple_set_uid (stmt, info->freeing_call_events);

  auto_vec<gimple> *ptr_checks = &ctx->asan_check_map.get_or_insert (ptr);

  tree base_addr = maybe_get_single_definition (ptr);
  auto_vec<gimple> *base_checks = NULL;
  if (base_addr)
    {
      base_checks = &ctx->asan_check_map.get_or_insert (base_addr);
      /* The get_or_insert above may have resized the map and invalidated
	 the reference behind PTR_CHECKS, so look the vector up again.  */
      ptr_checks = ctx->asan_check_map.get (ptr);
    }

  gimple g = maybe_get_dominating_check (*ptr_checks);
  gimple g2 = NULL;

  if (base_checks)
    /* Try with base address as well.  */
    g2 = maybe_get_dominating_check (*base_checks);

  if (g == NULL && g2 == NULL)
    {
      /* For this PTR we don't have any ASAN_CHECK stmts recorded, so there's
	 nothing to optimize yet.  */
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
      return false;
    }

  bool remove = false;

  if (ptr_checks)
    remove = can_remove_asan_check (*ptr_checks, len, bb);

  if (!remove && base_checks)
    /* Try with base address as well.  */
    remove = can_remove_asan_check (*base_checks, len, bb);

  if (!remove)
    {
      ptr_checks->safe_push (stmt);
      if (base_checks)
	base_checks->safe_push (stmt);
    }

  return remove;
}

/* Try to optimize away redundant UBSAN_NULL and ASAN_CHECK calls.

   We walk blocks in the CFG via a depth first search of the dominator
   tree; we push unique UBSAN_NULL or ASAN_CHECK statements into a vector
   in the NULL_CHECK_MAP or ASAN_CHECK_MAP hash maps as we enter the
   blocks.  When leaving a block, we mark the block as visited; then
   when checking the statements in the vector, we ignore statements that
   are coming from already visited blocks, because these cannot dominate
   anything anymore.  CTX is a sanopt context.  */
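
/* A small example of why marking blocks visited is enough: if the
   dominator tree has a block A with children B and C, the walk finishes
   the whole subtree rooted at B and marks B visited before it enters C.
   Any statement from B still sitting in one of the vectors cannot
   dominate C, so maybe_get_dominating_check simply pops it there.  */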

static void
sanopt_optimize_walker (basic_block bb, struct sanopt_ctx *ctx)
{
  basic_block son;
  gimple_stmt_iterator gsi;
  sanopt_info *info = (sanopt_info *) bb->aux;
  bool asan_check_optimize = (flag_sanitize & SANITIZE_ADDRESS) != 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      bool remove = false;

      if (!is_gimple_call (stmt))
	{
	  /* Handle asm volatile or asm with "memory" clobber
	     the same as a potentially freeing call.  */
	  gasm *asm_stmt = dyn_cast <gasm *> (stmt);
	  if (asm_stmt
	      && asan_check_optimize
	      && (gimple_asm_clobbers_memory_p (asm_stmt)
		  || gimple_asm_volatile_p (asm_stmt)))
	    info->freeing_call_events++;
	  gsi_next (&gsi);
	  continue;
	}

      if (asan_check_optimize && !nonfreeing_call_p (stmt))
	info->freeing_call_events++;

      if (gimple_call_internal_p (stmt))
	switch (gimple_call_internal_fn (stmt))
	  {
	  case IFN_UBSAN_NULL:
	    remove = maybe_optimize_ubsan_null_ifn (ctx, stmt);
	    break;
	  case IFN_UBSAN_VPTR:
	    remove = maybe_optimize_ubsan_vptr_ifn (ctx, stmt);
	    break;
	  case IFN_ASAN_CHECK:
	    if (asan_check_optimize)
	      remove = maybe_optimize_asan_check_ifn (ctx, stmt);
	    if (!remove)
	      ctx->asan_num_accesses++;
	    break;
	  default:
	    break;
	  }

      if (remove)
	{
	  /* Drop this check.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Optimizing out\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	}
      else
	gsi_next (&gsi);
    }

  if (asan_check_optimize)
    {
      info->has_freeing_call_p = info->freeing_call_events != 0;
      info->has_freeing_call_computed_p = true;
    }

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    sanopt_optimize_walker (son, ctx);

  /* We're leaving this BB, so mark it to that effect.  */
  info->visited_p = true;
}

/* Try to remove redundant sanitizer checks in function FUN.  */

static int
sanopt_optimize (function *fun)
{
  struct sanopt_ctx ctx;
  ctx.asan_num_accesses = 0;

  /* Set up block info for each basic block.  */
  alloc_aux_for_blocks (sizeof (sanopt_info));

  /* We're going to do a dominator walk, so ensure that we have
     dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Recursively walk the dominator tree optimizing away
     redundant checks.  */
  sanopt_optimize_walker (ENTRY_BLOCK_PTR_FOR_FN (fun), &ctx);

  free_aux_for_blocks ();

  return ctx.asan_num_accesses;
}

/* Perform optimization of sanitize functions.  */

namespace {

const pass_data pass_data_sanopt =
{
  GIMPLE_PASS, /* type */
  "sanopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sanopt : public gimple_opt_pass
{
public:
  pass_sanopt (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sanopt, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_sanitize; }
  virtual unsigned int execute (function *);

}; // class pass_sanopt

unsigned int
pass_sanopt::execute (function *fun)
{
  basic_block bb;
  int asan_num_accesses = 0;

  /* Try to remove redundant checks.  */
  if (optimize
      && (flag_sanitize
	  & (SANITIZE_NULL | SANITIZE_ALIGNMENT
	     | SANITIZE_ADDRESS | SANITIZE_VPTR)))
    asan_num_accesses = sanopt_optimize (fun);
  else if (flag_sanitize & SANITIZE_ADDRESS)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, fun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple stmt = gsi_stmt (gsi);
	    if (is_gimple_call (stmt) && gimple_call_internal_p (stmt)
		&& gimple_call_internal_fn (stmt) == IFN_ASAN_CHECK)
	      ++asan_num_accesses;
	  }
    }

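  /* Expanding every ASAN_CHECK inline bloats large functions; past the
     --param asan-instrumentation-with-call-threshold limit the checks are
     expanded as __asan_loadN/__asan_storeN calls instead.  */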
  bool use_calls = ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD < INT_MAX
    && asan_num_accesses >= ASAN_INSTRUMENTATION_WITH_CALL_THRESHOLD;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool no_next = false;

	  if (!is_gimple_call (stmt))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  if (gimple_call_internal_p (stmt))
	    {
	      enum internal_fn ifn = gimple_call_internal_fn (stmt);
	      switch (ifn)
		{
		case IFN_UBSAN_NULL:
		  no_next = ubsan_expand_null_ifn (&gsi);
		  break;
		case IFN_UBSAN_BOUNDS:
		  no_next = ubsan_expand_bounds_ifn (&gsi);
		  break;
		case IFN_UBSAN_OBJECT_SIZE:
		  no_next = ubsan_expand_objsize_ifn (&gsi);
		  break;
		case IFN_UBSAN_VPTR:
		  no_next = ubsan_expand_vptr_ifn (&gsi);
		  break;
		case IFN_ASAN_CHECK:
		  no_next = asan_expand_check_ifn (&gsi, use_calls);
		  break;
		default:
		  break;
		}
	    }
	  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      switch (DECL_FUNCTION_CODE (callee))
		{
		case BUILT_IN_UNREACHABLE:
		  if (flag_sanitize & SANITIZE_UNREACHABLE
		      && !lookup_attribute ("no_sanitize_undefined",
					    DECL_ATTRIBUTES (fun->decl)))
		    no_next = ubsan_instrument_unreachable (&gsi);
		  break;
		default:
		  break;
		}
	    }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Expanded\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  if (!no_next)
	    gsi_next (&gsi);
	}
    }
  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_sanopt (gcc::context *ctxt)
{
  return new pass_sanopt (ctxt);
}
