/* Pointer Bounds Checker instrumentation pass.
2   Copyright (C) 2014-2015 Free Software Foundation, Inc.
3   Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "hash-set.h"
25#include "machmode.h"
26#include "vec.h"
27#include "double-int.h"
28#include "input.h"
29#include "alias.h"
30#include "symtab.h"
31#include "options.h"
32#include "wide-int.h"
33#include "inchash.h"
34#include "tree.h"
35#include "fold-const.h"
36#include "stor-layout.h"
37#include "varasm.h"
38#include "target.h"
39#include "tree-iterator.h"
40#include "tree-cfg.h"
41#include "langhooks.h"
42#include "tree-pass.h"
43#include "diagnostic.h"
44#include "ggc.h"
45#include "is-a.h"
46#include "cfgloop.h"
47#include "stringpool.h"
48#include "tree-ssa-alias.h"
49#include "tree-ssanames.h"
50#include "tree-ssa-operands.h"
51#include "tree-ssa-address.h"
52#include "tree-ssa.h"
53#include "predict.h"
54#include "dominance.h"
55#include "cfg.h"
56#include "basic-block.h"
57#include "tree-ssa-loop-niter.h"
58#include "gimple-expr.h"
59#include "gimple.h"
60#include "tree-phinodes.h"
61#include "gimple-ssa.h"
62#include "ssa-iterators.h"
63#include "gimple-pretty-print.h"
64#include "gimple-iterator.h"
65#include "gimplify.h"
66#include "gimplify-me.h"
67#include "print-tree.h"
68#include "hashtab.h"
69#include "tm.h"
70#include "hard-reg-set.h"
71#include "function.h"
72#include "rtl.h"
73#include "flags.h"
74#include "statistics.h"
75#include "real.h"
76#include "fixed-value.h"
77#include "insn-config.h"
78#include "expmed.h"
79#include "dojump.h"
80#include "explow.h"
81#include "calls.h"
82#include "emit-rtl.h"
83#include "stmt.h"
84#include "expr.h"
85#include "tree-ssa-propagate.h"
86#include "gimple-fold.h"
87#include "tree-chkp.h"
88#include "gimple-walk.h"
89#include "rtl.h" /* For MEM_P, assign_temp.  */
90#include "tree-dfa.h"
91#include "ipa-ref.h"
92#include "lto-streamer.h"
93#include "cgraph.h"
94#include "ipa-chkp.h"
95#include "params.h"
96
97/*  Pointer Bounds Checker instruments code with memory checks to find
98    out-of-bounds memory accesses.  Checks are performed by computing
99    bounds for each pointer and then comparing address of accessed
100    memory before pointer dereferencing.
101
102    1. Function clones.
103
104    See ipa-chkp.c.
105
106    2. Instrumentation.
107
108    There are few things to instrument:
109
110    a) Memory accesses - add checker calls to check address of accessed memory
111    against bounds of dereferenced pointer.  Obviously safe memory
112    accesses like static variable access does not have to be instrumented
113    with checks.
114
115    Example:
116
117      val_2 = *p_1;
118
119      with 4 bytes access is transformed into:
120
121      __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
122      D.1_4 = p_1 + 3;
123      __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
124      val_2 = *p_1;
125
126      where __bound_tmp.1_3 are bounds computed for pointer p_1,
127      __builtin___chkp_bndcl is a lower bound check and
128      __builtin___chkp_bndcu is an upper bound check.
129
130    b) Pointer stores.
131
132    When pointer is stored in memory we need to store its bounds.  To
133    achieve compatibility of instrumented code with regular codes
134    we have to keep data layout and store bounds in special bound tables
135    via special checker call.  Implementation of bounds table may vary for
136    different platforms.  It has to associate pointer value and its
137    location (it is required because we may have two equal pointers
138    with different bounds stored in different places) with bounds.
139    Another checker builtin allows to get bounds for specified pointer
140    loaded from specified location.
141
142    Example:
143
144      buf1[i_1] = &buf2;
145
146      is transformed into:
147
148      buf1[i_1] = &buf2;
149      D.1_2 = &buf1[i_1];
150      __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
151
152      where __bound_tmp.1_2 are bounds of &buf2.
153
154    c) Static initialization.
155
156    The special case of pointer store is static pointer initialization.
157    Bounds initialization is performed in a few steps:
158      - register all static initializations in front-end using
159      chkp_register_var_initializer
160      - when file compilation finishes we create functions with special
161      attribute 'chkp ctor' and put explicit initialization code
162      (assignments) for all statically initialized pointers.
163      - when checker constructor is compiled checker pass adds required
164      bounds initialization for all statically initialized pointers
165      - since we do not actually need excess pointers initialization
166      in checker constructor we remove such assignments from them
167
168    d) Calls.
169
170    For each call in the code we add additional arguments to pass
171    bounds for pointer arguments.  We determine type of call arguments
172    using arguments list from function declaration; if function
173    declaration is not available we use function type; otherwise
174    (e.g. for unnamed arguments) we use type of passed value. Function
175    declaration/type is replaced with the instrumented one.
176
177    Example:
178
179      val_1 = foo (&buf1, &buf2, &buf1, 0);
180
181      is translated into:
182
183      val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
184                        &buf1, __bound_tmp.1_2, 0);
185
186    e) Returns.
187
188    If function returns a pointer value we have to return bounds also.
189    A new operand was added for return statement to hold returned bounds.
190
191    Example:
192
193      return &_buf1;
194
195      is transformed into
196
197      return &_buf1, __bound_tmp.1_1;
198
199    3. Bounds computation.
200
201    Compiler is fully responsible for computing bounds to be used for each
202    memory access.  The first step for bounds computation is to find the
203    origin of pointer dereferenced for memory access.  Basing on pointer
204    origin we define a way to compute its bounds.  There are just few
205    possible cases:
206
207    a) Pointer is returned by call.
208
209    In this case we use corresponding checker builtin method to obtain returned
210    bounds.
211
212    Example:
213
214      buf_1 = malloc (size_2);
215      foo (buf_1);
216
217      is translated into:
218
219      buf_1 = malloc (size_2);
220      __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
221      foo (buf_1, __bound_tmp.1_3);
222
223    b) Pointer is an address of an object.
224
225    In this case compiler tries to compute objects size and create corresponding
226    bounds.  If object has incomplete type then special checker builtin is used to
227    obtain its size at runtime.
228
229    Example:
230
231      foo ()
232      {
233        <unnamed type> __bound_tmp.3;
234	static int buf[100];
235
236	<bb 3>:
237	__bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
238
239	<bb 2>:
240	return &buf, __bound_tmp.3_2;
241      }
242
243    Example:
244
245      Address of an object 'extern int buf[]' with incomplete type is
246      returned.
247
248      foo ()
249      {
250        <unnamed type> __bound_tmp.4;
251	long unsigned int __size_tmp.3;
252
253	<bb 3>:
254	__size_tmp.3_4 = __builtin_ia32_sizeof (buf);
255	__bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
256
257	<bb 2>:
258	return &buf, __bound_tmp.4_3;
259      }
260
261    c) Pointer is the result of object narrowing.
262
263    It happens when we use pointer to an object to compute pointer to a part
264    of an object.  E.g. we take pointer to a field of a structure. In this
265    case we perform bounds intersection using bounds of original object and
266    bounds of object's part (which are computed basing on its type).
267
268    There may be some debatable questions about when narrowing should occur
269    and when it should not.  To avoid false bound violations in correct
270    programs we do not perform narrowing when address of an array element is
271    obtained (it has address of the whole array) and when address of the first
272    structure field is obtained (because it is guaranteed to be equal to
273    address of the whole structure and it is legal to cast it back to structure).
274
275    Default narrowing behavior may be changed using compiler flags.
276
277    Example:
278
279      In this example address of the second structure field is returned.
280
281      foo (struct A * p, __bounds_type __bounds_of_p)
282      {
283        <unnamed type> __bound_tmp.3;
284	int * _2;
285	int * _5;
286
287	<bb 2>:
288	_5 = &p_1(D)->second_field;
289	__bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
290	__bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
291	                                              __bounds_of_p_3(D));
292	_2 = &p_1(D)->second_field;
293	return _2, __bound_tmp.3_8;
294      }
295
296    Example:
297
298      In this example address of the first field of array element is returned.
299
300      foo (struct A * p, __bounds_type __bounds_of_p, int i)
301      {
302	long unsigned int _3;
303	long unsigned int _4;
304	struct A * _6;
305	int * _7;
306
307	<bb 2>:
308	_3 = (long unsigned int) i_1(D);
309	_4 = _3 * 8;
310	_6 = p_5(D) + _4;
311	_7 = &_6->first_field;
312	return _7, __bounds_of_p_2(D);
313      }
314
315
316    d) Pointer is the result of pointer arithmetic or type cast.
317
318    In this case bounds of the base pointer are used.  In case of binary
319    operation producing a pointer we are analyzing data flow further
320    looking for operand's bounds.  One operand is considered as a base
321    if it has some valid bounds.  If we fall into a case when none of
322    operands (or both of them) has valid bounds, a default bounds value
323    is used.
324
325    Trying to find out bounds for binary operations we may fall into
326    cyclic dependencies for pointers.  To avoid infinite recursion all
327    walked phi nodes instantly obtain corresponding bounds but created
328    bounds are marked as incomplete.  It helps us to stop DF walk during
329    bounds search.
330
331    When we reach pointer source, some args of incomplete bounds phi obtain
332    valid bounds and those values are propagated further through phi nodes.
333    If no valid bounds were found for phi node then we mark its result as
334    invalid bounds.  Process stops when all incomplete bounds become either
335    valid or invalid and we are able to choose a pointer base.
336
337    e) Pointer is loaded from the memory.
338
339    In this case we just need to load bounds from the bounds table.
340
341    Example:
342
343      foo ()
344      {
345        <unnamed type> __bound_tmp.3;
346	static int * buf;
347	int * _2;
348
349	<bb 2>:
350	_2 = buf;
351	__bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
352	return _2, __bound_tmp.3_4;
353      }
354
355*/
356
357typedef void (*assign_handler)(tree, tree, void *);
358
359static tree chkp_get_zero_bounds ();
360static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
361static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
362				     gimple_stmt_iterator *iter);
363static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
364						tree *elt, bool *safe,
365						bool *bitfield,
366						tree *bounds,
367						gimple_stmt_iterator *iter,
368						bool innermost_bounds);
369
370#define chkp_bndldx_fndecl \
371  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
372#define chkp_bndstx_fndecl \
373  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
374#define chkp_checkl_fndecl \
375  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
376#define chkp_checku_fndecl \
377  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
378#define chkp_bndmk_fndecl \
379  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
380#define chkp_ret_bnd_fndecl \
381  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
382#define chkp_intersect_fndecl \
383  (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
384#define chkp_narrow_bounds_fndecl \
385  (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
386#define chkp_sizeof_fndecl \
387  (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
388#define chkp_extract_lower_fndecl \
389  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
390#define chkp_extract_upper_fndecl \
391  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
392
393static GTY (()) tree chkp_uintptr_type;
394
395static GTY (()) tree chkp_zero_bounds_var;
396static GTY (()) tree chkp_none_bounds_var;
397
398static GTY (()) basic_block entry_block;
399static GTY (()) tree zero_bounds;
400static GTY (()) tree none_bounds;
401static GTY (()) tree incomplete_bounds;
402static GTY (()) tree tmp_var;
403static GTY (()) tree size_tmp_var;
404static GTY (()) bitmap chkp_abnormal_copies;
405
406struct hash_set<tree> *chkp_invalid_bounds;
407struct hash_set<tree> *chkp_completed_bounds_set;
408struct hash_map<tree, tree> *chkp_reg_bounds;
409struct hash_map<tree, tree> *chkp_bound_vars;
410struct hash_map<tree, tree> *chkp_reg_addr_bounds;
411struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
412struct hash_map<tree, tree> *chkp_bounds_map;
413struct hash_map<tree, tree> *chkp_static_var_bounds;
414
415static bool in_chkp_pass;
416
417#define CHKP_BOUND_TMP_NAME "__bound_tmp"
418#define CHKP_SIZE_TMP_NAME "__size_tmp"
419#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
420#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
421#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
422#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
423#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
424
425/* Static checker constructors may become very large and their
426   compilation with optimization may take too much time.
427   Therefore we put a limit to number of statements in one
428   constructor.  Tests with 100 000 statically initialized
429   pointers showed following compilation times on Sandy Bridge
430   server (used -O2):
431   limit    100 => ~18 sec.
432   limit    300 => ~22 sec.
433   limit   1000 => ~30 sec.
434   limit   3000 => ~49 sec.
435   limit   5000 => ~55 sec.
436   limit  10000 => ~76 sec.
437   limit 100000 => ~532 sec.  */
438#define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
439
440struct chkp_ctor_stmt_list
441{
442  tree stmts;
443  int avail;
444};
445
446/* Return 1 if function FNDECL is instrumented by Pointer
447   Bounds Checker.  */
448bool
449chkp_function_instrumented_p (tree fndecl)
450{
451  return fndecl
452    && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
453}
454
455/* Mark function FNDECL as instrumented.  */
456void
457chkp_function_mark_instrumented (tree fndecl)
458{
459  if (chkp_function_instrumented_p (fndecl))
460    return;
461
462  DECL_ATTRIBUTES (fndecl)
463    = tree_cons (get_identifier ("chkp instrumented"), NULL,
464		 DECL_ATTRIBUTES (fndecl));
465}
466
467/* Return true when STMT is builtin call to instrumentation function
468   corresponding to CODE.  */
469
470bool
471chkp_gimple_call_builtin_p (gimple call,
472			    enum built_in_function code)
473{
474  tree fndecl;
475  if (is_gimple_call (call)
476      && (fndecl = targetm.builtin_chkp_function (code))
477      && gimple_call_fndecl (call) == fndecl)
478    return true;
479  return false;
480}
481
482/* Emit code to build zero bounds and return RTL holding
483   the result.  */
484rtx
485chkp_expand_zero_bounds ()
486{
487  tree zero_bnd;
488
489  if (flag_chkp_use_static_const_bounds)
490    zero_bnd = chkp_get_zero_bounds_var ();
491  else
492    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
493					    integer_zero_node);
494  return expand_normal (zero_bnd);
495}
496
497/* Emit code to store zero bounds for PTR located at MEM.  */
498void
499chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
500{
501  tree zero_bnd, bnd, addr, bndstx;
502
503  if (flag_chkp_use_static_const_bounds)
504    zero_bnd = chkp_get_zero_bounds_var ();
505  else
506    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
507					    integer_zero_node);
508  bnd = make_tree (pointer_bounds_type_node,
509		   assign_temp (pointer_bounds_type_node, 0, 1));
510  addr = build1 (ADDR_EXPR,
511		 build_pointer_type (TREE_TYPE (mem)), mem);
512  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
513
514  expand_assignment (bnd, zero_bnd, false);
515  expand_normal (bndstx);
516}
517
/* Build retbnd call for returned value RETVAL.

   If BNDVAL is not NULL then result is stored
   in it.  Otherwise a temporary is created to
   hold returned value.

   GSI points to a position for a retbnd call;
   the new call is inserted right after it.

   Obtained bounds are returned.  */
tree
chkp_insert_retbnd_call (tree bndval, tree retval,
			 gimple_stmt_iterator *gsi)
{
  gimple call;

  if (!bndval)
    bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");

  /* Build __builtin___chkp_bndret (RETVAL) and capture its
     result in BNDVAL.  */
  call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  gimple_call_set_lhs (call, bndval);
  gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);

  return bndval;
}
546
547/* Build a GIMPLE_CALL identical to CALL but skipping bounds
548   arguments.  */
549
550gcall *
551chkp_copy_call_skip_bounds (gcall *call)
552{
553  bitmap bounds;
554  unsigned i;
555
556  bitmap_obstack_initialize (NULL);
557  bounds = BITMAP_ALLOC (NULL);
558
559  for (i = 0; i < gimple_call_num_args (call); i++)
560    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
561      bitmap_set_bit (bounds, i);
562
563  if (!bitmap_empty_p (bounds))
564    call = gimple_call_copy_skip_args (call, bounds);
565  gimple_call_set_with_bounds (call, false);
566
567  BITMAP_FREE (bounds);
568  bitmap_obstack_release (NULL);
569
570  return call;
571}
572
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  /* The callee counts as instrumented if it is an instrumentation
     clone or carries the "chkp instrumented" attribute.  */
  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Instrumented callee but a call without bounds: redirect to the
     original (non-instrumented) function.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  /* Non-instrumented callee but a call carrying bounds — except for
     the checker builtins which legitimately take bounds args.  */
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
611
/* Mark statement S to not be instrumented.  The mark is kept in
   the statement's first pass-local flag (GF_PLF_1).  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
618
/* Mark statement S to be instrumented, clearing the pass-local
   flag set by chkp_mark_stmt.  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
625
/* Return 1 if statement S should not be instrumented
   (i.e. was marked via chkp_mark_stmt).  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
632
633/* Get var to be used for bound temps.  */
634static tree
635chkp_get_tmp_var (void)
636{
637  if (!tmp_var)
638    tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
639
640  return tmp_var;
641}
642
643/* Get SSA_NAME to be used as temp.  */
644static tree
645chkp_get_tmp_reg (gimple stmt)
646{
647  if (in_chkp_pass)
648    return make_ssa_name (chkp_get_tmp_var (), stmt);
649
650  return make_temp_ssa_name (pointer_bounds_type_node, stmt,
651			     CHKP_BOUND_TMP_NAME);
652}
653
654/* Get var to be used for size temps.  */
655static tree
656chkp_get_size_tmp_var (void)
657{
658  if (!size_tmp_var)
659    size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
660
661  return size_tmp_var;
662}
663
664/* Register bounds BND for address of OBJ.  */
665static void
666chkp_register_addr_bounds (tree obj, tree bnd)
667{
668  if (bnd == incomplete_bounds)
669    return;
670
671  chkp_reg_addr_bounds->put (obj, bnd);
672
673  if (dump_file && (dump_flags & TDF_DETAILS))
674    {
675      fprintf (dump_file, "Regsitered bound ");
676      print_generic_expr (dump_file, bnd, 0);
677      fprintf (dump_file, " for address of ");
678      print_generic_expr (dump_file, obj, 0);
679      fprintf (dump_file, "\n");
680    }
681}
682
683/* Return bounds registered for address of OBJ.  */
684static tree
685chkp_get_registered_addr_bounds (tree obj)
686{
687  tree *slot = chkp_reg_addr_bounds->get (obj);
688  return slot ? *slot : NULL_TREE;
689}
690
/* Mark BOUNDS as completed, i.e. resolved to either valid or
   invalid bounds (see chkp_finish_incomplete_bounds).  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}
704
/* Return 1 if BOUNDS were marked as completed
   (via chkp_mark_completed_bounds) and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
711
/* Clear completed bound marks by replacing the set with
   a fresh empty one.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
719
720/* Mark BOUNDS associated with PTR as incomplete.  */
721static void
722chkp_register_incomplete_bounds (tree bounds, tree ptr)
723{
724  chkp_incomplete_bounds_map->put (bounds, ptr);
725
726  if (dump_file && (dump_flags & TDF_DETAILS))
727    {
728      fprintf (dump_file, "Regsitered incomplete bounds ");
729      print_generic_expr (dump_file, bounds, 0);
730      fprintf (dump_file, " for ");
731      print_generic_expr (dump_file, ptr, 0);
732      fprintf (dump_file, "\n");
733    }
734}
735
736/* Return 1 if BOUNDS are incomplete and 0 otherwise.  */
737static bool
738chkp_incomplete_bounds (tree bounds)
739{
740  if (bounds == incomplete_bounds)
741    return true;
742
743  if (chkp_completed_bounds (bounds))
744    return false;
745
746  return chkp_incomplete_bounds_map->get (bounds) != NULL;
747}
748
/* Clear incomplete bound marks by replacing the map with
   a fresh empty one.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
756
757/* Build and return bndmk call which creates bounds for structure
758   pointed by PTR.  Structure should have complete type.  */
759tree
760chkp_make_bounds_for_struct_addr (tree ptr)
761{
762  tree type = TREE_TYPE (ptr);
763  tree size;
764
765  gcc_assert (POINTER_TYPE_P (type));
766
767  size = TYPE_SIZE (TREE_TYPE (type));
768
769  gcc_assert (size);
770
771  return build_call_nary (pointer_bounds_type_node,
772			  build_fold_addr_expr (chkp_bndmk_fndecl),
773			  2, ptr, size);
774}
775
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  /* Incomplete bounds are always SSA names defined by a phi.  */
  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  */
	  return false;
	}
    }

  /* All args present; continue the traversal.  */
  return true;
}
806
/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  /* The traversal callback clears RES and stops the walk as soon
     as a phi with a missing argument is found.  */
  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
819
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  BOUNDS is the phi result;
   SLOT holds the pointer SSA name the bounds were created for.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* The bounds phi mirrors the pointer phi: for each pointer arg
     find (or compute) its bounds and add them on the same edge.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  /* Continue the traversal.  */
  return true;
}
849
/* Mark BOUNDS as invalid, i.e. having no valid bounds source.  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}
863
864/* Return 1 if BOUNDS were marked as invalid and 0 otherwise.  */
865static bool
866chkp_valid_bounds (tree bounds)
867{
868  if (bounds == zero_bounds || bounds == none_bounds)
869    return false;
870
871  return !chkp_invalid_bounds->contains (bounds);
872}
873
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Nothing to do for bounds that were already resolved.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* One valid, fully computed argument is enough to treat the
	 phi result as completed; recompute all args from it.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  /* Continue the traversal.  */
  return true;
}
912
/* Helper function for chkp_finish_incomplete_bounds.
   Marks all still-incomplete bounds as invalid (and completed,
   so they are not visited again).  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
				 tree *slot ATTRIBUTE_UNUSED,
				 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      chkp_mark_completed_bounds (bounds);
    }
  /* Continue the traversal.  */
  return true;
}
927
928/* When all bound phi nodes have all their args computed
929   we have enough info to find valid bounds.  We iterate
930   through all incompleted bounds searching for valid
931   bounds.  Found valid bounds are marked as completed
932   and all remaining incompleted bounds are recomputed.
933   Process continues until no new valid bounds may be
934   found.  All remained incompleted bounds are marked as
935   invalid (i.e. have no valid source of bounds).  */
936static void
937chkp_finish_incomplete_bounds (void)
938{
939  bool found_valid;
940
941  while (found_valid)
942    {
943      found_valid = false;
944
945      chkp_incomplete_bounds_map->
946	traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
947
948      if (found_valid)
949	chkp_incomplete_bounds_map->
950	  traverse<void *, chkp_recompute_phi_bounds> (NULL);
951    }
952
953  chkp_incomplete_bounds_map->
954    traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
955  chkp_incomplete_bounds_map->
956    traverse<void *, chkp_recompute_phi_bounds> (NULL);
957
958  chkp_erase_completed_bounds ();
959  chkp_erase_incomplete_bounds ();
960}
961
962/* Return 1 if type TYPE is a pointer type or a
963   structure having a pointer type as one of its fields.
964   Otherwise return 0.  */
965bool
966chkp_type_has_pointer (const_tree type)
967{
968  bool res = false;
969
970  if (BOUNDED_TYPE_P (type))
971    res = true;
972  else if (RECORD_OR_UNION_TYPE_P (type))
973    {
974      tree field;
975
976      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
977	if (TREE_CODE (field) == FIELD_DECL)
978	  res = res || chkp_type_has_pointer (TREE_TYPE (field));
979    }
980  else if (TREE_CODE (type) == ARRAY_TYPE)
981    res = chkp_type_has_pointer (TREE_TYPE (type));
982
983  return res;
984}
985
/* Return the number of bound slots required for a value of type
   TYPE: 1 for a pointer, the number of distinct bound slots for
   a record/union, 0 otherwise (including NULL TYPE).  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* Collect bound slot positions into a bitmap and count them.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
1009
1010/* Get bounds associated with NODE via
1011   chkp_set_bounds call.  */
1012tree
1013chkp_get_bounds (tree node)
1014{
1015  tree *slot;
1016
1017  if (!chkp_bounds_map)
1018    return NULL_TREE;
1019
1020  slot = chkp_bounds_map->get (node);
1021  return slot ? *slot : NULL_TREE;
1022}
1023
/* Associate bounds VAL with NODE.  */
void
chkp_set_bounds (tree node, tree val)
{
  /* The map is created lazily on first use.  */
  if (!chkp_bounds_map)
    chkp_bounds_map = new hash_map<tree, tree>;

  chkp_bounds_map->put (node, val);
}
1033
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into
   bounds initialization list then 1 is returned. Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when the checker is disabled or the
     initializer is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need
     their bounds set up by the checker constructor.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
1057
1058/* Helper function for chkp_finish_file.
1059
1060   Add new modification statement (RHS is assigned to LHS)
1061   into list of static initializer statementes (passed in ARG).
1062   If statements list becomes too big, emit checker constructor
1063   and start the new one.  */
1064static void
1065chkp_add_modification_to_stmt_list (tree lhs,
1066				    tree rhs,
1067				    void *arg)
1068{
1069  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1070  tree modify;
1071
1072  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1073    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1074
1075  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1076  append_to_statement_list (modify, &stmts->stmts);
1077
1078  stmts->avail--;
1079}
1080
1081/* Build and return ADDR_EXPR for specified object OBJ.  */
1082static tree
1083chkp_build_addr_expr (tree obj)
1084{
1085  return TREE_CODE (obj) == TARGET_MEM_REF
1086    ? tree_mem_ref_addr (ptr_type_node, obj)
1087    : build_fold_addr_expr (obj);
1088}
1089
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constants have a size known at compile time.
	 NOTE(review): TREE_STRING_LENGTH includes the trailing
	 '\0', so the upper bound covers the terminator too —
	 confirm this is the intended bounds convention.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  SIZE is the
	 offset of the last valid byte, hence the minus one.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Build an indirect call to the checker's sizeof routine
	 so the size is computed at constructor run time.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* Treat zero dynamic size as "infinite" bounds:
	     size = (size != 0) ? size : (0 - lb), which makes
	     UB wrap to the maximal address.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound points to the last valid byte: lb + size.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Flush accumulated initializers into a static constructor
     once the statement budget is exhausted and start afresh.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1147
1148/* Return entry block to be used for checker initilization code.
1149   Create new block if required.  */
1150static basic_block
1151chkp_get_entry_block (void)
1152{
1153  if (!entry_block)
1154    entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
1155
1156  return entry_block;
1157}
1158
1159/* Return a bounds var to be used for pointer var PTR_VAR.  */
1160static tree
1161chkp_get_bounds_var (tree ptr_var)
1162{
1163  tree bnd_var;
1164  tree *slot;
1165
1166  slot = chkp_bound_vars->get (ptr_var);
1167  if (slot)
1168    bnd_var = *slot;
1169  else
1170    {
1171      bnd_var = create_tmp_reg (pointer_bounds_type_node,
1172				CHKP_BOUND_TMP_NAME);
1173      chkp_bound_vars->put (ptr_var, bnd_var);
1174    }
1175
1176  return bnd_var;
1177}
1178
1179/* If BND is an abnormal bounds copy, return a copied value.
1180   Otherwise return BND.  */
1181static tree
1182chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1183{
1184  if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1185    {
1186      gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1187      gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1188      bnd = gimple_assign_rhs1 (bnd_def);
1189    }
1190
1191  return bnd;
1192}
1193
1194/* Register bounds BND for object PTR in global bounds table.
1195   A copy of bounds may be created for abnormal ssa names.
1196   Returns bounds to use for PTR.  */
1197static tree
1198chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1199{
1200  bool abnormal_ptr;
1201
1202  if (!chkp_reg_bounds)
1203    return bnd;
1204
1205  /* Do nothing if bounds are incomplete_bounds
1206     because it means bounds will be recomputed.  */
1207  if (bnd == incomplete_bounds)
1208    return bnd;
1209
1210  abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1211		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1212		  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1213
1214  /* A single bounds value may be reused multiple times for
1215     different pointer values.  It may cause coalescing issues
1216     for abnormal SSA names.  To avoid it we create a bounds
1217     copy in case it is computed for abnormal SSA name.
1218
1219     We also cannot reuse such created copies for other pointers  */
1220  if (abnormal_ptr
1221      || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1222    {
1223      tree bnd_var = NULL_TREE;
1224
1225      if (abnormal_ptr)
1226	{
1227	  if (SSA_NAME_VAR (ptr))
1228	    bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1229	}
1230      else
1231	bnd_var = chkp_get_tmp_var ();
1232
1233      /* For abnormal copies we may just find original
1234	 bounds and use them.  */
1235      if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1236	bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1237      /* For undefined values we usually use none bounds
1238	 value but in case of abnormal edge it may cause
1239	 coalescing failures.  Use default definition of
1240	 bounds variable instead to avoid it.  */
1241      else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1242	       && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1243	{
1244	  bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1245
1246	  if (dump_file && (dump_flags & TDF_DETAILS))
1247	    {
1248	      fprintf (dump_file, "Using default def bounds ");
1249	      print_generic_expr (dump_file, bnd, 0);
1250	      fprintf (dump_file, " for abnormal default def SSA name ");
1251	      print_generic_expr (dump_file, ptr, 0);
1252	      fprintf (dump_file, "\n");
1253	    }
1254	}
1255      else
1256	{
1257	  tree copy;
1258	  gimple def = SSA_NAME_DEF_STMT (ptr);
1259	  gimple assign;
1260	  gimple_stmt_iterator gsi;
1261
1262	  if (bnd_var)
1263	    copy = make_ssa_name (bnd_var, gimple_build_nop ());
1264	  else
1265	    copy = make_temp_ssa_name (pointer_bounds_type_node,
1266				       gimple_build_nop (),
1267				       CHKP_BOUND_TMP_NAME);
1268	  bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1269	  assign = gimple_build_assign (copy, bnd);
1270
1271	  if (dump_file && (dump_flags & TDF_DETAILS))
1272	    {
1273	      fprintf (dump_file, "Creating a copy of bounds ");
1274	      print_generic_expr (dump_file, bnd, 0);
1275	      fprintf (dump_file, " for abnormal SSA name ");
1276	      print_generic_expr (dump_file, ptr, 0);
1277	      fprintf (dump_file, "\n");
1278	    }
1279
1280	  if (gimple_code (def) == GIMPLE_NOP)
1281	    {
1282	      gsi = gsi_last_bb (chkp_get_entry_block ());
1283	      if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1284		gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1285	      else
1286		gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1287	    }
1288	  else
1289	    {
1290	      gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1291	      /* Sometimes (e.g. when we load a pointer from a
1292		 memory) bounds are produced later than a pointer.
1293		 We need to insert bounds copy appropriately.  */
1294	      if (gimple_code (bnd_def) != GIMPLE_NOP
1295		  && stmt_dominates_stmt_p (def, bnd_def))
1296		gsi = gsi_for_stmt (bnd_def);
1297	      else
1298		gsi = gsi_for_stmt (def);
1299	      gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1300	    }
1301
1302	  bnd = copy;
1303	}
1304
1305      if (abnormal_ptr)
1306	bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1307    }
1308
1309  chkp_reg_bounds->put (ptr, bnd);
1310
1311  if (dump_file && (dump_flags & TDF_DETAILS))
1312    {
1313      fprintf (dump_file, "Regsitered bound ");
1314      print_generic_expr (dump_file, bnd, 0);
1315      fprintf (dump_file, " for pointer ");
1316      print_generic_expr (dump_file, ptr, 0);
1317      fprintf (dump_file, "\n");
1318    }
1319
1320  return bnd;
1321}
1322
1323/* Get bounds registered for object PTR in global bounds table.  */
1324static tree
1325chkp_get_registered_bounds (tree ptr)
1326{
1327  tree *slot;
1328
1329  if (!chkp_reg_bounds)
1330    return NULL_TREE;
1331
1332  slot = chkp_reg_bounds->get (ptr);
1333  return slot ? *slot : NULL_TREE;
1334}
1335
1336/* Add bound retvals to return statement pointed by GSI.  */
1337
1338static void
1339chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1340{
1341  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1342  tree retval = gimple_return_retval (ret);
1343  tree ret_decl = DECL_RESULT (cfun->decl);
1344  tree bounds;
1345
1346  if (!retval)
1347    return;
1348
1349  if (BOUNDED_P (ret_decl))
1350    {
1351      bounds = chkp_find_bounds (retval, gsi);
1352      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1353      gimple_return_set_retbnd (ret, bounds);
1354    }
1355
1356  update_stmt (ret);
1357}
1358
1359/* Force OP to be suitable for using as an argument for call.
1360   New statements (if any) go to SEQ.  */
1361static tree
1362chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1363{
1364  gimple_seq stmts;
1365  gimple_stmt_iterator si;
1366
1367  op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1368
1369  for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1370    chkp_mark_stmt (gsi_stmt (si));
1371
1372  gimple_seq_add_seq (seq, stmts);
1373
1374  return op;
1375}
1376
1377/* Generate lower bound check for memory access by ADDR.
1378   Check is inserted before the position pointed by ITER.
1379   DIRFLAG indicates whether memory access is load or store.  */
1380static void
1381chkp_check_lower (tree addr, tree bounds,
1382		  gimple_stmt_iterator iter,
1383		  location_t location,
1384		  tree dirflag)
1385{
1386  gimple_seq seq;
1387  gimple check;
1388  tree node;
1389
1390  if (!chkp_function_instrumented_p (current_function_decl)
1391      && bounds == chkp_get_zero_bounds ())
1392    return;
1393
1394  if (dirflag == integer_zero_node
1395      && !flag_chkp_check_read)
1396    return;
1397
1398  if (dirflag == integer_one_node
1399      && !flag_chkp_check_write)
1400    return;
1401
1402  seq = NULL;
1403
1404  node = chkp_force_gimple_call_op (addr, &seq);
1405
1406  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1407  chkp_mark_stmt (check);
1408  gimple_call_set_with_bounds (check, true);
1409  gimple_set_location (check, location);
1410  gimple_seq_add_stmt (&seq, check);
1411
1412  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1413
1414  if (dump_file && (dump_flags & TDF_DETAILS))
1415    {
1416      gimple before = gsi_stmt (iter);
1417      fprintf (dump_file, "Generated lower bound check for statement ");
1418      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1419      fprintf (dump_file, "  ");
1420      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1421    }
1422}
1423
1424/* Generate upper bound check for memory access by ADDR.
1425   Check is inserted before the position pointed by ITER.
1426   DIRFLAG indicates whether memory access is load or store.  */
1427static void
1428chkp_check_upper (tree addr, tree bounds,
1429		  gimple_stmt_iterator iter,
1430		  location_t location,
1431		  tree dirflag)
1432{
1433  gimple_seq seq;
1434  gimple check;
1435  tree node;
1436
1437  if (!chkp_function_instrumented_p (current_function_decl)
1438      && bounds == chkp_get_zero_bounds ())
1439    return;
1440
1441  if (dirflag == integer_zero_node
1442      && !flag_chkp_check_read)
1443    return;
1444
1445  if (dirflag == integer_one_node
1446      && !flag_chkp_check_write)
1447    return;
1448
1449  seq = NULL;
1450
1451  node = chkp_force_gimple_call_op (addr, &seq);
1452
1453  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1454  chkp_mark_stmt (check);
1455  gimple_call_set_with_bounds (check, true);
1456  gimple_set_location (check, location);
1457  gimple_seq_add_stmt (&seq, check);
1458
1459  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1460
1461  if (dump_file && (dump_flags & TDF_DETAILS))
1462    {
1463      gimple before = gsi_stmt (iter);
1464      fprintf (dump_file, "Generated upper bound check for statement ");
1465      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1466      fprintf (dump_file, "  ");
1467      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1468    }
1469}
1470
1471/* Generate lower and upper bound checks for memory access
1472   to memory slot [FIRST, LAST] againsr BOUNDS.  Checks
1473   are inserted before the position pointed by ITER.
1474   DIRFLAG indicates whether memory access is load or store.  */
1475void
1476chkp_check_mem_access (tree first, tree last, tree bounds,
1477		       gimple_stmt_iterator iter,
1478		       location_t location,
1479		       tree dirflag)
1480{
1481  chkp_check_lower (first, bounds, iter, location, dirflag);
1482  chkp_check_upper (last, bounds, iter, location, dirflag);
1483}
1484
1485/* Replace call to _bnd_chk_* pointed by GSI with
1486   bndcu and bndcl calls.  DIRFLAG determines whether
1487   check is for read or write.  */
1488
1489void
1490chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1491				    tree dirflag)
1492{
1493  gimple_stmt_iterator call_iter = *gsi;
1494  gimple call = gsi_stmt (*gsi);
1495  tree fndecl = gimple_call_fndecl (call);
1496  tree addr = gimple_call_arg (call, 0);
1497  tree bounds = chkp_find_bounds (addr, gsi);
1498
1499  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1500      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1501    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1502
1503  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1504    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1505
1506  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1507    {
1508      tree size = gimple_call_arg (call, 1);
1509      addr = fold_build_pointer_plus (addr, size);
1510      addr = fold_build_pointer_plus_hwi (addr, -1);
1511      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1512    }
1513
1514  gsi_remove (&call_iter, true);
1515}
1516
1517/* Replace call to _bnd_get_ptr_* pointed by GSI with
1518   corresponding bounds extract call.  */
1519
1520void
1521chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1522{
1523  gimple call = gsi_stmt (*gsi);
1524  tree fndecl = gimple_call_fndecl (call);
1525  tree addr = gimple_call_arg (call, 0);
1526  tree bounds = chkp_find_bounds (addr, gsi);
1527  gimple extract;
1528
1529  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1530    fndecl = chkp_extract_lower_fndecl;
1531  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1532    fndecl = chkp_extract_upper_fndecl;
1533  else
1534    gcc_unreachable ();
1535
1536  extract = gimple_build_call (fndecl, 1, bounds);
1537  gimple_call_set_lhs (extract, gimple_call_lhs (call));
1538  chkp_mark_stmt (extract);
1539
1540  gsi_replace (gsi, extract, false);
1541}
1542
1543/* Return COMPONENT_REF accessing FIELD in OBJ.  */
1544static tree
1545chkp_build_component_ref (tree obj, tree field)
1546{
1547  tree res;
1548
1549  /* If object is TMR then we do not use component_ref but
1550     add offset instead.  We need it to be able to get addr
1551     of the reasult later.  */
1552  if (TREE_CODE (obj) == TARGET_MEM_REF)
1553    {
1554      tree offs = TMR_OFFSET (obj);
1555      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1556				      offs, DECL_FIELD_OFFSET (field));
1557
1558      gcc_assert (offs);
1559
1560      res = copy_node (obj);
1561      TREE_TYPE (res) = TREE_TYPE (field);
1562      TMR_OFFSET (res) = offs;
1563    }
1564  else
1565    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1566
1567  return res;
1568}
1569
1570/* Return ARRAY_REF for array ARR and index IDX with
1571   specified element type ETYPE and element size ESIZE.  */
1572static tree
1573chkp_build_array_ref (tree arr, tree etype, tree esize,
1574		      unsigned HOST_WIDE_INT idx)
1575{
1576  tree index = build_int_cst (size_type_node, idx);
1577  tree res;
1578
1579  /* If object is TMR then we do not use array_ref but
1580     add offset instead.  We need it to be able to get addr
1581     of the reasult later.  */
1582  if (TREE_CODE (arr) == TARGET_MEM_REF)
1583    {
1584      tree offs = TMR_OFFSET (arr);
1585
1586      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1587				     esize, index);
1588      gcc_assert(esize);
1589
1590      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1591				    offs, esize);
1592      gcc_assert (offs);
1593
1594      res = copy_node (arr);
1595      TREE_TYPE (res) = etype;
1596      TMR_OFFSET (res) = offs;
1597    }
1598  else
1599    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1600
1601  return res;
1602}
1603
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* A pointer element: compute its bounds once per slot.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the pointer value into a temporary first so
	     chkp_find_bounds can work on an SSA name whose
	     defining statement is at a known position.  */
	  tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field, accumulating its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* Field position is DECL_FIELD_BIT_OFFSET bits plus
	       DECL_FIELD_OFFSET bytes (hence the * 8).  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays of unknown or zero length.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into every element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
1675
1676/* Fill HAVE_BOUND output bitmap with information about
1677   bounds requred for object of type TYPE.
1678
1679   OFFS is used for recursive calls and holds basic
1680   offset of TYPE in outer structure in bits.
1681
1682   HAVE_BOUND[i] is set to 1 if there is a field
1683   in TYPE which has pointer type and offset
1684   equal to i * POINTER_SIZE - OFFS in bits.  */
1685void
1686chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1687			 HOST_WIDE_INT offs)
1688{
1689  if (BOUNDED_TYPE_P (type))
1690    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1691  else if (RECORD_OR_UNION_TYPE_P (type))
1692    {
1693      tree field;
1694
1695      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1696	if (TREE_CODE (field) == FIELD_DECL)
1697	  {
1698	    HOST_WIDE_INT field_offs
1699	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1700	    if (DECL_FIELD_OFFSET (field))
1701	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1702	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1703				     offs + field_offs);
1704	  }
1705    }
1706  else if (TREE_CODE (type) == ARRAY_TYPE)
1707    {
1708      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1709      tree etype = TREE_TYPE (type);
1710      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1711      unsigned HOST_WIDE_INT cur;
1712
1713      if (!maxval
1714	  || TREE_CODE (maxval) != INTEGER_CST
1715	  || integer_minus_onep (maxval))
1716	return;
1717
1718      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1719	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1720    }
1721}
1722
1723/* Fill bitmap RES with information about bounds for
1724   type TYPE.  See chkp_find_bound_slots_1 for more
1725   details.  */
1726void
1727chkp_find_bound_slots (const_tree type, bitmap res)
1728{
1729  bitmap_clear (res);
1730  chkp_find_bound_slots_1 (type, res, 0);
1731}
1732
1733/* Return 1 if call to FNDECL should be instrumented
1734   and 0 otherwise.  */
1735
1736static bool
1737chkp_instrument_normal_builtin (tree fndecl)
1738{
1739  switch (DECL_FUNCTION_CODE (fndecl))
1740    {
1741    case BUILT_IN_STRLEN:
1742    case BUILT_IN_STRCPY:
1743    case BUILT_IN_STRNCPY:
1744    case BUILT_IN_STPCPY:
1745    case BUILT_IN_STPNCPY:
1746    case BUILT_IN_STRCAT:
1747    case BUILT_IN_STRNCAT:
1748    case BUILT_IN_MEMCPY:
1749    case BUILT_IN_MEMPCPY:
1750    case BUILT_IN_MEMSET:
1751    case BUILT_IN_MEMMOVE:
1752    case BUILT_IN_BZERO:
1753    case BUILT_IN_STRCMP:
1754    case BUILT_IN_STRNCMP:
1755    case BUILT_IN_BCMP:
1756    case BUILT_IN_MEMCMP:
1757    case BUILT_IN_MEMCPY_CHK:
1758    case BUILT_IN_MEMPCPY_CHK:
1759    case BUILT_IN_MEMMOVE_CHK:
1760    case BUILT_IN_MEMSET_CHK:
1761    case BUILT_IN_STRCPY_CHK:
1762    case BUILT_IN_STRNCPY_CHK:
1763    case BUILT_IN_STPCPY_CHK:
1764    case BUILT_IN_STPNCPY_CHK:
1765    case BUILT_IN_STRCAT_CHK:
1766    case BUILT_IN_STRNCAT_CHK:
1767    case BUILT_IN_MALLOC:
1768    case BUILT_IN_CALLOC:
1769    case BUILT_IN_REALLOC:
1770      return 1;
1771
1772    default:
1773      return 0;
1774    }
1775}
1776
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      /* NOTE: the braceless nesting below is intentional; each
	 "else" binds to the nearest preceding "if", so the outer
	 fallback is the final "type = TREE_TYPE (call_arg)".  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer (or pass-by-reference) argument gets a single
	 bounds argument right after it.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* An aggregate holding pointers gets one bounds argument
	     per pointer slot found inside it.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

           free (all_bounds);
	}
    }

  /* If no bounds were added, reuse the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA definitions from the old statement to the
	 new one before it replaces the old in the IL.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1994
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  /* Build the decl first: TREE_STATIC/TREE_PUBLIC must be set
     before DECL_ASSEMBLER_NAME is computed for the lookup below.  */
  var  = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		     pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the existing decl; the freshly built VAR is dropped.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The target hook provides the actual constant initializer.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2043
2044/* Generate code to make bounds with specified lower bound LB and SIZE.
2045   if AFTER is 1 then code is inserted after position pointed by ITER
2046   otherwise code is inserted before position pointed by ITER.
2047   If ITER is NULL then code is added to entry block.  */
2048static tree
2049chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2050{
2051  gimple_seq seq;
2052  gimple_stmt_iterator gsi;
2053  gimple stmt;
2054  tree bounds;
2055
2056  if (iter)
2057    gsi = *iter;
2058  else
2059    gsi = gsi_start_bb (chkp_get_entry_block ());
2060
2061  seq = NULL;
2062
2063  lb = chkp_force_gimple_call_op (lb, &seq);
2064  size = chkp_force_gimple_call_op (size, &seq);
2065
2066  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2067  chkp_mark_stmt (stmt);
2068
2069  bounds = chkp_get_tmp_reg (stmt);
2070  gimple_call_set_lhs (stmt, bounds);
2071
2072  gimple_seq_add_stmt (&seq, stmt);
2073
2074  if (iter && after)
2075    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2076  else
2077    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2078
2079  if (dump_file && (dump_flags & TDF_DETAILS))
2080    {
2081      fprintf (dump_file, "Made bounds: ");
2082      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2083      if (iter)
2084	{
2085	  fprintf (dump_file, "  inserted before statement: ");
2086	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2087	}
2088      else
2089	fprintf (dump_file, "  at function entry\n");
2090    }
2091
2092  /* update_stmt (stmt); */
2093
2094  return bounds;
2095}
2096
2097/* Return var holding zero bounds.  */
2098tree
2099chkp_get_zero_bounds_var (void)
2100{
2101  if (!chkp_zero_bounds_var)
2102    chkp_zero_bounds_var
2103      = chkp_make_static_const_bounds (0, -1,
2104				       CHKP_ZERO_BOUNDS_VAR_NAME);
2105  return chkp_zero_bounds_var;
2106}
2107
2108/* Return var holding none bounds.  */
2109tree
2110chkp_get_none_bounds_var (void)
2111{
2112  if (!chkp_none_bounds_var)
2113    chkp_none_bounds_var
2114      = chkp_make_static_const_bounds (-1, 0,
2115				       CHKP_NONE_BOUNDS_VAR_NAME);
2116  return chkp_none_bounds_var;
2117}
2118
/* Return SSA_NAME used to represent zero bounds.  The name is created
   once per function and cached in ZERO_BOUNDS.  */
static tree
chkp_get_zero_bounds (void)
{
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* NOTE(review): the `> 0' check presumably distinguishes an explicit
     enable from a default/auto flag value — confirm against the option
     definition.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from the static constant bounds var in the
	 entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise materialize bounds with a bndmk call at function entry.  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2147
/* Return SSA_NAME used to represent none bounds.  The name is created
   once per function and cached in NONE_BOUNDS.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  /* NOTE(review): the `> 0' check presumably distinguishes an explicit
     enable from a default/auto flag value — confirm against the option
     definition.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from the static constant bounds var in the
	 entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* lb = -1, size = 2 gives the same [-1, 0] range as the static
       none bounds var created by chkp_get_none_bounds_var.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2177
/* Return bounds to be used as a result of an operation which
   should not create a pointer (e.g. MULT_EXPR).  Zero bounds
   are used so that such values never trigger a bounds violation.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2185
/* Return bounds to be used for loads of non-pointer values.  Zero
   bounds are used since no real bounds are stored for such values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2192
/* Return true if we may use a bndret call to get bounds for the
   pointer returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal functions are never instrumented.  */
  if (gimple_call_internal_p (call))
    return false;

  /* Bounds-narrowing builtins do return bounds.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* An already instrumented call returns bounds by construction.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Machine-dependent builtins are not instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      /* Some normal builtins get an instrumented counterpart.  */
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Remaining builtins return bounds only when they are
	 always_inline and an instrumented clone with a body exists
	 (the body will be inlined and instrumented).  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* Regular (non-builtin) calls are assumed to be instrumented.  */
  return true;
}
2230
/* Build bounds returned by CALL and register them for the call's LHS.
   Special-cases several builtins whose returned bounds are known;
   otherwise emits a retbnd call after CALL.  Return the bounds.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* alloca result: lower bound is the returned pointer, size is
	 the requested allocation size.  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      /* Bounds are copied from the second argument.  */
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) arguments; locate the
	     RETARG-th non-bounds argument among the instrumented args.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2337
/* Return the retbnd call statement which obtains bounds for the
   value of SSA name VAL, or NULL if there is no such call or VAL
   is not an SSA name.  VAL must be defined by a GIMPLE_CALL.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a call to the retbnd builtin.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2357
/* Check the next parameter after the given PARM is bounds
   and return its default SSA_NAME (create if required).  */
static tree
chkp_get_next_bounds_parm (tree parm)
{
  /* The bounds parameter immediately follows PARM in the param chain.  */
  tree bounds = TREE_CHAIN (parm);
  gcc_assert (POINTER_BOUNDS_P (bounds));
  bounds = ssa_default_def (cfun, bounds);
  if (!bounds)
    {
      /* No default definition yet — create and register one.  */
      bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
      set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
    }
  return bounds;
}
2373
/* Return bounds to be used for input argument PARM.  PARM is the
   default SSA_NAME of a PARM_DECL.  Bounds are looked up in the
   registered-bounds maps first and computed otherwise.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Bounded parm: use the bounds passed in the following
	     bounds parameter.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  /* Register the result for PARM so later queries hit the cache.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2437
/* Build and return CALL_EXPR for bndldx builtin which loads bounds
   for pointer PTR located by address ADDR.  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as instrumented so it is not processed again.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2451
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* Force operands into a form valid for a GIMPLE call; helper
     statements go into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2486
/* Build and return CALL_EXPR for bndstx builtin which stores BOUNDS
   for pointer PTR located by address ADDR.  */
tree
chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
		    chkp_bndstx_fndecl);
  /* Note the builtin's argument order is (ptr, bounds, addr).  */
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
			       fn, 3, ptr, bounds, addr);
  /* Mark the call as instrumented so it is not processed again.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2500
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* Force operands into a form valid for a GIMPLE call; helper
     statements go into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Note the builtin's argument order is (ptr, bounds, addr).  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2531
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   The RHS code determines how bounds are derived: loaded from the
   bounds table, propagated from an operand, built conditionally,
   or invalidated for non-pointer-producing operations.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* Operand whose bounds were reused, if any; used for the abnormal
     SSA check at the end.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* Taking the address above requires the parm to be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	/* Remember which operand's bounds were chosen (if any) for the
	   abnormal SSA check below.  */
	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime with the same condition as the
	       original COND_EXPR.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Emit a runtime comparison matching MIN/MAX semantics
	       and select the corresponding operand's bounds.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2754
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: dispatch on the kind of underlying decl.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known nonzero size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* For abnormal PHIs reuse the bounds var associated with the
	 underlying decl (or a fresh temporary when there is none);
	 otherwise use the common bounds temporary.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2867
2868/* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE.  */
2869tree
2870chkp_build_make_bounds_call (tree lower_bound, tree size)
2871{
2872  tree call = build1 (ADDR_EXPR,
2873		      build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2874		      chkp_bndmk_fndecl);
2875  return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2876			  call, 2, lower_bound, size);
2877}
2878
2879/* Create static bounds var of specfified OBJ which is
2880   is either VAR_DECL or string constant.  */
2881static tree
2882chkp_make_static_bounds (tree obj)
2883{
2884  static int string_id = 1;
2885  static int var_id = 1;
2886  tree *slot;
2887  const char *var_name;
2888  char *bnd_var_name;
2889  tree bnd_var;
2890
2891  /* First check if we already have required var.  */
2892  if (chkp_static_var_bounds)
2893    {
2894      /* For vars we use assembler name as a key in
2895	 chkp_static_var_bounds map.  It allows to
2896	 avoid duplicating bound vars for decls
2897	 sharing assembler name.  */
2898      if (TREE_CODE (obj) == VAR_DECL)
2899	{
2900	  tree name = DECL_ASSEMBLER_NAME (obj);
2901	  slot = chkp_static_var_bounds->get (name);
2902	  if (slot)
2903	    return *slot;
2904	}
2905      else
2906	{
2907	  slot = chkp_static_var_bounds->get (obj);
2908	  if (slot)
2909	    return *slot;
2910	}
2911    }
2912
2913  /* Build decl for bounds var.  */
2914  if (TREE_CODE (obj) == VAR_DECL)
2915    {
2916      if (DECL_IGNORED_P (obj))
2917	{
2918	  bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2919	  sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2920	}
2921      else
2922	{
2923	  var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2924
2925	  /* For hidden symbols we want to skip first '*' char.  */
2926	  if (*var_name == '*')
2927	    var_name++;
2928
2929	  bnd_var_name = (char *) xmalloc (strlen (var_name)
2930					   + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2931	  strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2932	  strcat (bnd_var_name, var_name);
2933	}
2934
2935      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2936			    get_identifier (bnd_var_name),
2937			    pointer_bounds_type_node);
2938
2939      /* Address of the obj will be used as lower bound.  */
2940      TREE_ADDRESSABLE (obj) = 1;
2941    }
2942  else
2943    {
2944      bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2945      sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2946
2947      bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2948			    get_identifier (bnd_var_name),
2949			    pointer_bounds_type_node);
2950    }
2951
2952  TREE_PUBLIC (bnd_var) = 0;
2953  TREE_USED (bnd_var) = 1;
2954  TREE_READONLY (bnd_var) = 0;
2955  TREE_STATIC (bnd_var) = 1;
2956  TREE_ADDRESSABLE (bnd_var) = 0;
2957  DECL_ARTIFICIAL (bnd_var) = 1;
2958  DECL_COMMON (bnd_var) = 1;
2959  DECL_COMDAT (bnd_var) = 1;
2960  DECL_READ_P (bnd_var) = 1;
2961  DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2962  /* Force output similar to constant bounds.
2963     See chkp_make_static_const_bounds. */
2964  varpool_node::get_create (bnd_var)->force_output = 1;
2965  /* Mark symbol as requiring bounds initialization.  */
2966  varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2967  varpool_node::finalize_decl (bnd_var);
2968
2969  /* Add created var to the map to use it for other references
2970     to obj.  */
2971  if (!chkp_static_var_bounds)
2972    chkp_static_var_bounds = new hash_map<tree, tree>;
2973
2974  if (TREE_CODE (obj) == VAR_DECL)
2975    {
2976      tree name = DECL_ASSEMBLER_NAME (obj);
2977      chkp_static_var_bounds->put (name, bnd_var);
2978    }
2979  else
2980    chkp_static_var_bounds->put (obj, bnd_var);
2981
2982  return bnd_var;
2983}
2984
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Emit a call to the sizeof builtin; the result is resolved
     at runtime (via relocation).  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = size_reloc != 0 ? size_reloc : max_size  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3045
3046/* Return 1 if TYPE has fields with zero size or fields
3047   marked with chkp_variable_size attribute.  */
3048bool
3049chkp_variable_size_type (tree type)
3050{
3051  bool res = false;
3052  tree field;
3053
3054  if (RECORD_OR_UNION_TYPE_P (type))
3055    for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3056      {
3057	if (TREE_CODE (field) == FIELD_DECL)
3058	  res = res
3059	    || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3060	    || chkp_variable_size_type (TREE_TYPE (field));
3061      }
3062  else
3063    res = !TYPE_SIZE (type)
3064      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3065      || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3066
3067  return res;
3068}
3069
3070/* Compute and return bounds for address of DECL which is
3071   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
3072static tree
3073chkp_get_bounds_for_decl_addr (tree decl)
3074{
3075  tree bounds;
3076
3077  gcc_assert (TREE_CODE (decl) == VAR_DECL
3078	      || TREE_CODE (decl) == PARM_DECL
3079	      || TREE_CODE (decl) == RESULT_DECL);
3080
3081  bounds = chkp_get_registered_addr_bounds (decl);
3082
3083  if (bounds)
3084    return bounds;
3085
3086  if (dump_file && (dump_flags & TDF_DETAILS))
3087    {
3088      fprintf (dump_file, "Building bounds for address of decl ");
3089      print_generic_expr (dump_file, decl, 0);
3090      fprintf (dump_file, "\n");
3091    }
3092
3093  /* Use zero bounds if size is unknown and checks for
3094     unknown sizes are restricted.  */
3095  if ((!DECL_SIZE (decl)
3096       || (chkp_variable_size_type (TREE_TYPE (decl))
3097	   && (TREE_STATIC (decl)
3098	       || DECL_EXTERNAL (decl)
3099	       || TREE_PUBLIC (decl))))
3100      && !flag_chkp_incomplete_type)
3101      return chkp_get_zero_bounds ();
3102
3103  if (flag_chkp_use_static_bounds
3104      && TREE_CODE (decl) == VAR_DECL
3105      && (TREE_STATIC (decl)
3106	      || DECL_EXTERNAL (decl)
3107	      || TREE_PUBLIC (decl))
3108      && !DECL_THREAD_LOCAL_P (decl))
3109    {
3110      tree bnd_var = chkp_make_static_bounds (decl);
3111      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3112      gimple stmt;
3113
3114      bounds = chkp_get_tmp_reg (gimple_build_nop ());
3115      stmt = gimple_build_assign (bounds, bnd_var);
3116      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3117    }
3118  else if (!DECL_SIZE (decl)
3119      || (chkp_variable_size_type (TREE_TYPE (decl))
3120	  && (TREE_STATIC (decl)
3121	      || DECL_EXTERNAL (decl)
3122	      || TREE_PUBLIC (decl))))
3123    {
3124      gcc_assert (TREE_CODE (decl) == VAR_DECL);
3125      bounds = chkp_generate_extern_var_bounds (decl);
3126    }
3127  else
3128    {
3129      tree lb = chkp_build_addr_expr (decl);
3130      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3131    }
3132
3133  return bounds;
3134}
3135
/* Compute and return bounds for constant string CST, using
   TREE_STRING_LENGTH as the size.  Computed bounds are registered
   for reuse on later references to the same string.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  /* Reuse previously computed bounds for this string if any.  */
  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  /* NOTE(review): when flag_chkp_use_static_const_bounds > 0 the
     second clause already holds, making the first clause redundant
     in that case; presumably the first clause covers an "auto"
     (nonzero but not > 0) default value — confirm against the
     option's declared default.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically initialized bounds variable;
	 the load is placed in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Otherwise build bounds directly from the string's address
	 and length.  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
3173
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* A missing operand, or zero bounds, acts as a neutral element:
     just return the other operand without emitting code.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      /* Emit a call to the intersection builtin; its result is the
	 new (narrowed) bounds value.  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3224
3225/* Return 1 if we are allowed to narrow bounds for addressed FIELD
3226   and 0 othersize.  */
3227static bool
3228chkp_may_narrow_to_field (tree field)
3229{
3230  return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3231    && tree_to_uhwi (DECL_SIZE (field)) != 0
3232    && (!DECL_FIELD_OFFSET (field)
3233	|| TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3234    && (!DECL_FIELD_BIT_OFFSET (field)
3235	|| TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3236    && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3237    && !chkp_variable_size_type (TREE_TYPE (field));
3238}
3239
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  */
static bool
chkp_narrow_bounds_for_field (tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (field))
    return false;

  /* Accesses to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  /* NOTE(review): chkp_may_narrow_to_field accepts a NULL
     DECL_FIELD_OFFSET/DECL_FIELD_BIT_OFFSET, but tree_to_uhwi below
     requires non-NULL INTEGER_CST operands — presumably both are
     always set for laid-out fields; confirm.  */
  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  /* Narrow unless FIELD is the first field (zero offsets) and
     first-field narrowing is disabled.  */
  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
3264
3265/* Perform narrowing for BOUNDS using bounds computed for field
3266   access COMPONENT.  ITER meaning is the same as for
3267   chkp_intersect_bounds.  */
3268static tree
3269chkp_narrow_bounds_to_field (tree bounds, tree component,
3270			    gimple_stmt_iterator *iter)
3271{
3272  tree field = TREE_OPERAND (component, 1);
3273  tree size = DECL_SIZE_UNIT (field);
3274  tree field_ptr = chkp_build_addr_expr (component);
3275  tree field_bounds;
3276
3277  field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3278
3279  return chkp_intersect_bounds (field_bounds, bounds, iter);
3280}
3281
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  nodes[0] is the base object,
     nodes[len - 1] is NODE itself.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      /* A dereference is never known to be safe.  */
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
     bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing is not provably in-bounds here.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Rule 1: narrow to the component enclosing the
		 leftmost array reference and stop scanning.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    /* Rule 2 candidate: rightmost suitable component so far.  */
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow eagerly to each array-typed field and drop
		 any pending narrowing candidate.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* If caller requires innermost bounds and none were computed so
     far, fall back to bounds of the outermost pointer.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3423
/* Compute and return bounds for address of OBJ.
   Dispatches on the tree code of OBJ; computed bounds are
   registered so later queries for the same OBJ reuse them.
   Aborts with internal_error on an unsupported tree code.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* innermost_bounds == true guarantees BOUNDS is set.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* No meaningful object size for code addresses.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Address of *p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3491
3492/* Compute bounds for pointer PTR loaded from PTR_SRC.  Generate statements
3493   to compute bounds if required.  Computed bounds should be available at
3494   position pointed by ITER.
3495
3496   If PTR_SRC is NULL_TREE then pointer definition is identified.
3497
3498   If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3499   PTR.  If PTR is a any memory reference then ITER points to a statement
3500   after which bndldx will be inserterd.  In both cases ITER will be updated
3501   to point to the inserted bndldx statement.  */
3502
3503static tree
3504chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3505{
3506  tree addr = NULL_TREE;
3507  tree bounds = NULL_TREE;
3508
3509  if (!ptr_src)
3510    ptr_src = ptr;
3511
3512  bounds = chkp_get_registered_bounds (ptr_src);
3513
3514  if (bounds)
3515    return bounds;
3516
3517  switch (TREE_CODE (ptr_src))
3518    {
3519    case MEM_REF:
3520    case VAR_DECL:
3521      if (BOUNDED_P (ptr_src))
3522	if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3523	  bounds = chkp_get_zero_bounds ();
3524	else
3525	  {
3526	    addr = chkp_build_addr_expr (ptr_src);
3527	    bounds = chkp_build_bndldx (addr, ptr, iter);
3528	  }
3529      else
3530	bounds = chkp_get_nonpointer_load_bounds ();
3531      break;
3532
3533    case ARRAY_REF:
3534    case COMPONENT_REF:
3535      addr = get_base_address (ptr_src);
3536      if (DECL_P (addr)
3537	  || TREE_CODE (addr) == MEM_REF
3538	  || TREE_CODE (addr) == TARGET_MEM_REF)
3539	{
3540	  if (BOUNDED_P (ptr_src))
3541	    if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3542	      bounds = chkp_get_zero_bounds ();
3543	    else
3544	      {
3545		addr = chkp_build_addr_expr (ptr_src);
3546		bounds = chkp_build_bndldx (addr, ptr, iter);
3547	      }
3548	  else
3549	    bounds = chkp_get_nonpointer_load_bounds ();
3550	}
3551      else
3552	{
3553	  gcc_assert (TREE_CODE (addr) == SSA_NAME);
3554	  bounds = chkp_find_bounds (addr, iter);
3555	}
3556      break;
3557
3558    case PARM_DECL:
3559      gcc_unreachable ();
3560      bounds = chkp_get_bound_for_parm (ptr_src);
3561      break;
3562
3563    case TARGET_MEM_REF:
3564      addr = chkp_build_addr_expr (ptr_src);
3565      bounds = chkp_build_bndldx (addr, ptr, iter);
3566      break;
3567
3568    case SSA_NAME:
3569      bounds = chkp_get_registered_bounds (ptr_src);
3570      if (!bounds)
3571	{
3572	  gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3573	  gphi_iterator phi_iter;
3574
3575	  bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3576
3577	  gcc_assert (bounds);
3578
3579	  if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3580	    {
3581	      unsigned i;
3582
3583	      for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3584		{
3585		  tree arg = gimple_phi_arg_def (def_phi, i);
3586		  tree arg_bnd;
3587		  gphi *phi_bnd;
3588
3589		  arg_bnd = chkp_find_bounds (arg, NULL);
3590
3591		  /* chkp_get_bounds_by_definition created new phi
3592		     statement and phi_iter points to it.
3593
3594		     Previous call to chkp_find_bounds could create
3595		     new basic block and therefore change phi statement
3596		     phi_iter points to.  */
3597		  phi_bnd = phi_iter.phi ();
3598
3599		  add_phi_arg (phi_bnd, arg_bnd,
3600			       gimple_phi_arg_edge (def_phi, i),
3601			       UNKNOWN_LOCATION);
3602		}
3603
3604	      /* If all bound phi nodes have their arg computed
3605		 then we may finish its computation.  See
3606		 chkp_finish_incomplete_bounds for more details.  */
3607	      if (chkp_may_finish_incomplete_bounds ())
3608		chkp_finish_incomplete_bounds ();
3609	    }
3610
3611	  gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3612		      || chkp_incomplete_bounds (bounds));
3613	}
3614      break;
3615
3616    case ADDR_EXPR:
3617      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3618      break;
3619
3620    case INTEGER_CST:
3621      if (integer_zerop (ptr_src))
3622	bounds = chkp_get_none_bounds ();
3623      else
3624	bounds = chkp_get_invalid_op_bounds ();
3625      break;
3626
3627    default:
3628      if (dump_file && (dump_flags & TDF_DETAILS))
3629	{
3630	  fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3631		   get_tree_code_name (TREE_CODE (ptr_src)));
3632	  print_node (dump_file, "", ptr_src, 0);
3633	}
3634      internal_error ("chkp_find_bounds: Unexpected tree code %s",
3635		      get_tree_code_name (TREE_CODE (ptr_src)));
3636    }
3637
3638  if (!bounds)
3639    {
3640      if (dump_file && (dump_flags & TDF_DETAILS))
3641	{
3642	  fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3643	  print_node (dump_file, "", ptr_src, 0);
3644	}
3645      internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3646    }
3647
3648  return bounds;
3649}
3650
/* Normal case for bounds search without forced narrowing.
   Thin wrapper over chkp_find_bounds_1 with no separate
   load source for PTR.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3657
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  Thin wrapper over
   chkp_find_bounds_1.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3665
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build it's accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.

   Recurses through record/union fields and array elements; ARG is
   passed through unchanged to HANDLER.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS itself is a pointer: hand the pair to the handler.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  /* RHS is an initializer: walk only the initialized
	     fields that contain pointers.  */
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain struct copy: recurse into every pointer-carrying
	   field, pairing matching LHS/RHS component refs.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* One initializer value covers an index range.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit index restarts CUR; otherwise the
		     element follows the previous one.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3764
3765/* Add code to copy bounds for assignment of RHS to LHS.
3766   ARG is an iterator pointing ne code position.  */
3767static void
3768chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3769{
3770  gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3771  tree bounds = chkp_find_bounds (rhs, iter);
3772  tree addr = chkp_build_addr_expr(lhs);
3773
3774  chkp_build_bndstx (addr, rhs, bounds, iter);
3775}
3776
/* Emit static bound initializers and size vars.
   Builds up to two static constructors: 'P' priority ctors that
   store bounds for statically initialized pointers, and a 'B'
   priority ctor that initializes static bounds variables.
   Constructors are emitted in batches of at most
   MAX_STMTS_IN_STATIC_CHKP_CTOR statements.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Batch full: flush into a constructor and start a new one.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush the remaining partial batch, if any.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* DECL_INITIAL of a bounds var is &var it was made for
	   (see chkp_make_static_bounds).  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3844
3845/* An instrumentation function which is called for each statement
3846   having memory access we want to instrument.  It inserts check
3847   code and bounds copy code.
3848
3849   ITER points to statement to instrument.
3850
3851   NODE holds memory access in statement to check.
3852
3853   LOC holds the location information for statement.
3854
3855   DIRFLAGS determines whether access is read or write.
3856
3857   ACCESS_OFFS should be added to address used in NODE
3858   before check.
3859
3860   ACCESS_SIZE holds size of checked access.
3861
3862   SAFE indicates if NODE access is safe and should not be
3863   checked.  */
3864static void
3865chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3866		   location_t loc, tree dirflag,
3867		   tree access_offs, tree access_size,
3868		   bool safe)
3869{
3870  tree node_type = TREE_TYPE (node);
3871  tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3872  tree addr_first = NULL_TREE; /* address of the first accessed byte */
3873  tree addr_last = NULL_TREE; /* address of the last accessed byte */
3874  tree ptr = NULL_TREE; /* a pointer used for dereference */
3875  tree bounds = NULL_TREE;
3876
3877  /* We do not need instrumentation for clobbers.  */
3878  if (dirflag == integer_one_node
3879      && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3880      && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3881    return;
3882
3883  switch (TREE_CODE (node))
3884    {
3885    case ARRAY_REF:
3886    case COMPONENT_REF:
3887      {
3888	bool bitfield;
3889	tree elt;
3890
3891	if (safe)
3892	  {
3893	    /* We are not going to generate any checks, so do not
3894	       generate bounds as well.  */
3895	    addr_first = chkp_build_addr_expr (node);
3896	    break;
3897	  }
3898
3899	chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3900					    &bitfield, &bounds, iter, false);
3901
3902	/* Break if there is no dereference and operation is safe.  */
3903
3904	if (bitfield)
3905          {
3906            tree field = TREE_OPERAND (node, 1);
3907
3908            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3909              size = DECL_SIZE_UNIT (field);
3910
3911	    if (elt)
3912	      elt = chkp_build_addr_expr (elt);
3913            addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3914            addr_first = fold_build_pointer_plus_loc (loc,
3915						      addr_first,
3916						      byte_position (field));
3917          }
3918        else
3919          addr_first = chkp_build_addr_expr (node);
3920      }
3921      break;
3922
3923    case INDIRECT_REF:
3924      ptr = TREE_OPERAND (node, 0);
3925      addr_first = ptr;
3926      break;
3927
3928    case MEM_REF:
3929      ptr = TREE_OPERAND (node, 0);
3930      addr_first = chkp_build_addr_expr (node);
3931      break;
3932
3933    case TARGET_MEM_REF:
3934      ptr = TMR_BASE (node);
3935      addr_first = chkp_build_addr_expr (node);
3936      break;
3937
3938    case ARRAY_RANGE_REF:
3939      printf("ARRAY_RANGE_REF\n");
3940      debug_gimple_stmt(gsi_stmt(*iter));
3941      debug_tree(node);
3942      gcc_unreachable ();
3943      break;
3944
3945    case BIT_FIELD_REF:
3946      {
3947	tree offs, rem, bpu;
3948
3949	gcc_assert (!access_offs);
3950	gcc_assert (!access_size);
3951
3952	bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3953	offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3954	rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3955	offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3956
3957	size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3958        size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3959        size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3960        size = fold_convert (size_type_node, size);
3961
3962	chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3963			 dirflag, offs, size, safe);
3964	return;
3965      }
3966      break;
3967
3968    case VAR_DECL:
3969    case RESULT_DECL:
3970    case PARM_DECL:
3971      if (dirflag != integer_one_node
3972	  || DECL_REGISTER (node))
3973	return;
3974
3975      safe = true;
3976      addr_first = chkp_build_addr_expr (node);
3977      break;
3978
3979    default:
3980      return;
3981    }
3982
3983  /* If addr_last was not computed then use (addr_first + size - 1)
3984     expression to compute it.  */
3985  if (!addr_last)
3986    {
3987      addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3988      addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3989    }
3990
3991  /* Shift both first_addr and last_addr by access_offs if specified.  */
3992  if (access_offs)
3993    {
3994      addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3995      addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3996    }
3997
3998  /* Generate bndcl/bndcu checks if memory access is not safe.  */
3999  if (!safe)
4000    {
4001      gimple_stmt_iterator stmt_iter = *iter;
4002
4003      if (!bounds)
4004	bounds = chkp_find_bounds (ptr, iter);
4005
4006      chkp_check_mem_access (addr_first, addr_last, bounds,
4007			     stmt_iter, loc, dirflag);
4008    }
4009
4010  /* We need to store bounds in case pointer is stored.  */
4011  if (dirflag == integer_one_node
4012      && chkp_type_has_pointer (node_type)
4013      && flag_chkp_store_bounds)
4014    {
4015      gimple stmt = gsi_stmt (*iter);
4016      tree rhs1 = gimple_assign_rhs1 (stmt);
4017      enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4018
4019      if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4020	chkp_walk_pointer_assignments (node, rhs1, iter,
4021				       chkp_copy_bounds_for_elem);
4022      else
4023	{
4024	  bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4025	  chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4026	}
4027    }
4028}
4029
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.

   For every bndldx/bndstx/ret_bnd call emitted while copying
   bounds, a corresponding cgraph edge is created so the call
   graph stays consistent with the new statements.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  /* Nothing to do when bounds are not stored to memory.  */
  if (!flag_chkp_store_bounds)
    return;

  /* NOTE(review): ITER is passed by address, so after this call it
     presumably points past ASSIGN to the statements emitted by the
     walk — the backward loop below relies on that; confirm against
     chkp_walk_pointer_assignments.  */
  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only checker builtins are expected among the emitted calls.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  /* Inherit profile data from the inlined edge, then recompute
	     the frequency from the statement's actual basic block.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4068
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements following a block-terminating statement are the
	   inconsistency we are looking for: move them to the fallthru
	   edge instead.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Detach each trailing statement and queue it for insertion
	       on the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4126
4127/* Walker callback for chkp_replace_function_pointers.  Replaces
4128   function pointer in the specified operand with pointer to the
4129   instrumented function version.  */
4130static tree
4131chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4132			       void *data ATTRIBUTE_UNUSED)
4133{
4134  if (TREE_CODE (*op) == FUNCTION_DECL
4135      && chkp_instrumentable_p (*op)
4136      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4137	  /* For builtins we replace pointers only for selected
4138	     function and functions having definitions.  */
4139	  || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4140	      && (chkp_instrument_normal_builtin (*op)
4141		  || gimple_has_body_p (*op)))))
4142    {
4143      struct cgraph_node *node = cgraph_node::get_create (*op);
4144      struct cgraph_node *clone = NULL;
4145
4146      if (!node->instrumentation_clone)
4147	clone = chkp_maybe_create_clone (*op);
4148
4149      if (clone)
4150	*op = clone->decl;
4151      *walk_subtrees = 0;
4152    }
4153
4154  return NULL;
4155}
4156
4157/* This function searches for function pointers in statement
4158   pointed by GSI and replaces them with pointers to instrumented
4159   function versions.  */
4160static void
4161chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4162{
4163  gimple stmt = gsi_stmt (*gsi);
4164  /* For calls we want to walk call args only.  */
4165  if (gimple_code (stmt) == GIMPLE_CALL)
4166    {
4167      unsigned i;
4168      for (i = 0; i < gimple_call_num_args (stmt); i++)
4169	walk_tree (gimple_call_arg_ptr (stmt, i),
4170		   chkp_replace_function_pointer, NULL, NULL);
4171    }
4172  else
4173    walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4174}
4175
4176/* This function instruments all statements working with memory,
4177   calls and rets.
4178
4179   It also removes excess statements from static initializers.  */
4180static void
4181chkp_instrument_function (void)
4182{
4183  basic_block bb, next;
4184  gimple_stmt_iterator i;
4185  enum gimple_rhs_class grhs_class;
4186  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4187
4188  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4189  do
4190    {
4191      next = bb->next_bb;
4192      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
4193        {
4194          gimple s = gsi_stmt (i);
4195
4196	  /* Skip statement marked to not be instrumented.  */
4197	  if (chkp_marked_stmt_p (s))
4198	    {
4199	      gsi_next (&i);
4200	      continue;
4201	    }
4202
4203	  chkp_replace_function_pointers (&i);
4204
4205          switch (gimple_code (s))
4206            {
4207            case GIMPLE_ASSIGN:
4208	      chkp_process_stmt (&i, gimple_assign_lhs (s),
4209				 gimple_location (s), integer_one_node,
4210				 NULL_TREE, NULL_TREE, safe);
4211	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
4212				 gimple_location (s), integer_zero_node,
4213				 NULL_TREE, NULL_TREE, safe);
4214	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
4215	      if (grhs_class == GIMPLE_BINARY_RHS)
4216		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
4217				   gimple_location (s), integer_zero_node,
4218				   NULL_TREE, NULL_TREE, safe);
4219              break;
4220
4221            case GIMPLE_RETURN:
4222	      {
4223		greturn *r = as_a <greturn *> (s);
4224		if (gimple_return_retval (r) != NULL_TREE)
4225		  {
4226		    chkp_process_stmt (&i, gimple_return_retval (r),
4227				       gimple_location (r),
4228				       integer_zero_node,
4229				       NULL_TREE, NULL_TREE, safe);
4230
4231		    /* Additionally we need to add bounds
4232		       to return statement.  */
4233		    chkp_add_bounds_to_ret_stmt (&i);
4234		  }
4235	      }
4236	      break;
4237
4238	    case GIMPLE_CALL:
4239	      chkp_add_bounds_to_call_stmt (&i);
4240	      break;
4241
4242            default:
4243              ;
4244            }
4245
4246	  gsi_next (&i);
4247
4248	  /* We do not need any actual pointer stores in checker
4249	     static initializer.  */
4250	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
4251	      && gimple_code (s) == GIMPLE_ASSIGN
4252	      && gimple_store_p (s))
4253	    {
4254	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
4255	      gsi_remove (&del_iter, true);
4256	      unlink_stmt_vdef (s);
4257	      release_defs(s);
4258	    }
4259        }
4260      bb = next;
4261    }
4262  while (bb);
4263
4264  /* Some input params may have bounds and be address taken.  In this case
4265     we should store incoming bounds into bounds table.  */
4266  tree arg;
4267  if (flag_chkp_store_bounds)
4268    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
4269      if (TREE_ADDRESSABLE (arg))
4270	{
4271	  if (BOUNDED_P (arg))
4272	    {
4273	      tree bounds = chkp_get_next_bounds_parm (arg);
4274	      tree def_ptr = ssa_default_def (cfun, arg);
4275	      gimple_stmt_iterator iter
4276		= gsi_start_bb (chkp_get_entry_block ());
4277	      chkp_build_bndstx (chkp_build_addr_expr (arg),
4278				 def_ptr ? def_ptr : arg,
4279				 bounds, &iter);
4280
4281	      /* Skip bounds arg.  */
4282	      arg = TREE_CHAIN (arg);
4283	    }
4284	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
4285	    {
4286	      tree orig_arg = arg;
4287	      bitmap slots = BITMAP_ALLOC (NULL);
4288	      gimple_stmt_iterator iter
4289		= gsi_start_bb (chkp_get_entry_block ());
4290	      bitmap_iterator bi;
4291	      unsigned bnd_no;
4292
4293	      chkp_find_bound_slots (TREE_TYPE (arg), slots);
4294
4295	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
4296		{
4297		  tree bounds = chkp_get_next_bounds_parm (arg);
4298		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
4299		  tree addr = chkp_build_addr_expr (orig_arg);
4300		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
4301				     build_int_cst (ptr_type_node, offs));
4302		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
4303				     bounds, &iter);
4304
4305		  arg = DECL_CHAIN (arg);
4306		}
4307	      BITMAP_FREE (slots);
4308	    }
4309	}
4310}
4311
4312/* Find init/null/copy_ptr_bounds calls and replace them
4313   with assignments.  It should allow better code
4314   optimization.  */
4315
4316static void
4317chkp_remove_useless_builtins ()
4318{
4319  basic_block bb;
4320  gimple_stmt_iterator gsi;
4321
4322  FOR_EACH_BB_FN (bb, cfun)
4323    {
4324      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4325        {
4326	  gimple stmt = gsi_stmt (gsi);
4327	  tree fndecl;
4328	  enum built_in_function fcode;
4329
4330	  /* Find builtins returning first arg and replace
4331	     them with assignments.  */
4332	  if (gimple_code (stmt) == GIMPLE_CALL
4333	      && (fndecl = gimple_call_fndecl (stmt))
4334	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4335	      && (fcode = DECL_FUNCTION_CODE (fndecl))
4336	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4337		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4338		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4339		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4340	    {
4341	      tree res = gimple_call_arg (stmt, 0);
4342	      update_call_from_tree (&gsi, res);
4343	      stmt = gsi_stmt (gsi);
4344	      update_stmt (stmt);
4345	    }
4346        }
4347    }
4348}
4349
/* Initialize pass.  Resets all per-function instrumentation state
   and (re)allocates the maps used to track bounds.  Must be paired
   with chkp_fini.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear "do not instrument" marks possibly left from a previous
     function.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* Per-function maps are freshly allocated; the maps that persist
     across functions (reg bounds, bound vars, bounds map) are deleted
     first to avoid leaking the previous function's tables.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4395
/* Finalize instrumentation pass.  Releases the per-function state
   allocated by chkp_init; the cross-function maps (chkp_reg_bounds,
   chkp_bound_vars, chkp_bounds_map) are intentionally kept and are
   recycled by the next chkp_init call.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4416
/* Main instrumentation pass function.  Runs the fixed sequence of
   pass phases: init, instrument, clean up redundant builtins, mark
   the function as instrumented, repair the CFG, and finalize.
   Returns 0 (no TODO flags beyond pass_data_chkp's).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  /* Instrumentation may have left statements after block-ending
     statements; fix that before finalizing.  */
  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4435
4436/* Instrumentation pass gate.  */
4437static bool
4438chkp_gate (void)
4439{
4440  return cgraph_node::get (cfun->decl)->instrumentation_clone
4441    || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4442}
4443
namespace {

/* Pass descriptor for the "chkp" GIMPLE pass.  Requires SSA and CFG;
   verifies the IL and updates SSA when done.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pointer Bounds Checker instrumentation pass.  Thin opt_pass wrapper
   delegating to chkp_gate and chkp_execute.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4486
/* Create an instance of the chkp pass for the pass manager.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4492
4493#include "gt-tree-chkp.h"
4494