/* Pointer Bounds Checker instrumentation pass.
   Copyright (C) 2014-2017 Free Software Foundation, Inc.
   Contributed by Ilya Enkovich (ilya.enkovich@intel.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "tree-iterator.h"
#include "tree-cfg.h"
#include "langhooks.h"
#include "tree-ssa-address.h"
#include "tree-ssa-loop-niter.h"
#include "gimple-pretty-print.h"
#include "gimple-iterator.h"
#include "gimplify.h"
#include "gimplify-me.h"
#include "print-tree.h"
#include "calls.h"
#include "expr.h"
#include "tree-ssa-propagate.h"
#include "tree-chkp.h"
#include "gimple-walk.h"
#include "tree-dfa.h"
#include "ipa-chkp.h"
#include "params.h"

/* Pointer Bounds Checker instruments code with memory checks to find
   out-of-bounds memory accesses.  Checks are performed by computing
   bounds for each pointer and then comparing the address of accessed
   memory against them before the pointer is dereferenced.

   1. Function clones.

   See ipa-chkp.c.

   2. Instrumentation.

   There are a few things to instrument:

   a) Memory accesses - add checker calls to check the address of
   accessed memory against the bounds of the dereferenced pointer.
   Obviously safe memory accesses, such as static variable accesses,
   do not have to be instrumented with checks.

   Example:

     val_2 = *p_1;

   with a 4-byte access is transformed into:

     __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
     D.1_4 = p_1 + 3;
     __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
     val_2 = *p_1;

   where __bound_tmp.1_3 are the bounds computed for pointer p_1,
   __builtin___chkp_bndcl is a lower bound check and
   __builtin___chkp_bndcu is an upper bound check.

   b) Pointer stores.

   When a pointer is stored in memory we need to store its bounds.  To
   keep instrumented code compatible with regular code we have to
   preserve the data layout and store bounds in special bound tables
   via special checker calls.  The implementation of the bounds table
   may vary for different platforms.  It has to associate a pointer
   value and its location with bounds (the location is required
   because we may have two equal pointers with different bounds stored
   in different places).  Another checker builtin allows us to get the
   bounds for a specified pointer loaded from a specified location.

   Example:

     buf1[i_1] = &buf2;

   is transformed into:

     buf1[i_1] = &buf2;
     D.1_2 = &buf1[i_1];
     __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);

   where __bound_tmp.1_2 are the bounds of &buf2.

   c) Static initialization.

   A special case of pointer store is static pointer initialization.
   Bounds initialization is performed in a few steps:
     - register all static initializations in the front end using
       chkp_register_var_initializer
     - when file compilation finishes, create functions with the
       special attribute 'chkp ctor' and put explicit initialization
       code (assignments) for all statically initialized pointers there
     - when a checker constructor is compiled, the checker pass adds
       the required bounds initialization for all statically
       initialized pointers
     - since we do not actually need the excess pointer initializations
       in a checker constructor, such assignments are removed from it
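
   Example:

     static int buf[10];
     static int *p = buf;

   produces a 'chkp ctor' constructor which, after instrumentation,
   keeps only the bounds store for 'p' (a sketch assuming 4-byte int;
   the temporary names are compiler-generated):

     __bound_tmp.1_1 = __builtin___chkp_bndmk (&buf, 40);
     __builtin___chkp_bndstx (&p, &buf, __bound_tmp.1_1);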

   d) Calls.

   For each call in the code we add additional arguments to pass
   bounds for pointer arguments.  The type of each call argument is
   determined using the argument list from the function declaration;
   if the declaration is not available we use the function type;
   otherwise (e.g. for unnamed arguments) we use the type of the
   passed value.  The function declaration/type is replaced with the
   instrumented one.

   Example:

     val_1 = foo (&buf1, &buf2, &buf1, 0);

   is translated into:

     val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
                       &buf1, __bound_tmp.1_2, 0);

   e) Returns.

   If a function returns a pointer value, we have to return its
   bounds also.  A new operand was added to the return statement to
   hold the returned bounds.

   Example:

     return &_buf1;

   is transformed into:

     return &_buf1, __bound_tmp.1_1;

   3. Bounds computation.

   The compiler is fully responsible for computing the bounds to be
   used for each memory access.  The first step of bounds computation
   is to find the origin of the pointer dereferenced for the memory
   access.  Based on the pointer origin we define a way to compute
   its bounds.  There are just a few possible cases:

   a) Pointer is returned by a call.

   In this case we use the corresponding checker builtin to obtain
   the returned bounds.

   Example:

     buf_1 = malloc (size_2);
     foo (buf_1);

   is translated into:

     buf_1 = malloc (size_2);
     __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
     foo (buf_1, __bound_tmp.1_3);

   b) Pointer is the address of an object.

   In this case the compiler tries to compute the object's size and
   create the corresponding bounds.  If the object has an incomplete
   type, a special checker builtin is used to obtain its size at
   runtime.

   Example:

     foo ()
     {
       <unnamed type> __bound_tmp.3;
       static int buf[100];

       <bb 3>:
       __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);

       <bb 2>:
       return &buf, __bound_tmp.3_2;
     }

   Example:

   The address of an object 'extern int buf[]' with incomplete type
   is returned.

     foo ()
     {
       <unnamed type> __bound_tmp.4;
       long unsigned int __size_tmp.3;

       <bb 3>:
       __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
       __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);

       <bb 2>:
       return &buf, __bound_tmp.4_3;
     }

   c) Pointer is the result of object narrowing.

   This happens when we use a pointer to an object to compute a
   pointer to a part of the object, e.g. when we take the pointer to
   a field of a structure.  In this case we perform a bounds
   intersection of the bounds of the original object and the bounds
   of the object's part (which are computed based on its type).

   There may be some debatable questions about when narrowing should
   occur and when it should not.  To avoid false bound violations in
   correct programs we do not perform narrowing when the address of
   an array element is obtained (it gets the bounds of the whole
   array) and when the address of the first structure field is
   obtained (because it is guaranteed to be equal to the address of
   the whole structure, and it is legal to cast it back to the
   structure).

   The default narrowing behavior may be changed using compiler flags.

   Example:

   In this example the address of the second structure field is
   returned.

     foo (struct A * p, __bounds_type __bounds_of_p)
     {
       <unnamed type> __bound_tmp.3;
       int * _2;
       int * _5;

       <bb 2>:
       _5 = &p_1(D)->second_field;
       __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
       __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
                                                     __bounds_of_p_3(D));
       _2 = &p_1(D)->second_field;
       return _2, __bound_tmp.3_8;
     }

   Example:

   In this example the address of the first field of an array element
   is returned.

     foo (struct A * p, __bounds_type __bounds_of_p, int i)
     {
       long unsigned int _3;
       long unsigned int _4;
       struct A * _6;
       int * _7;

       <bb 2>:
       _3 = (long unsigned int) i_1(D);
       _4 = _3 * 8;
       _6 = p_5(D) + _4;
       _7 = &_6->first_field;
       return _7, __bounds_of_p_2(D);
     }

   d) Pointer is the result of pointer arithmetic or a type cast.

   In this case the bounds of the base pointer are used.  For a
   binary operation producing a pointer we analyze the data flow
   further, looking for the operands' bounds.  One operand is
   considered the base if it has some valid bounds.  If none of the
   operands (or both of them) have valid bounds, a default bounds
   value is used.
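
   Example (a sketch; the SSA names are illustrative):

     p_2 = p_1 + offset_3;

   here the bounds computed for p_1 are reused as the bounds of p_2.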

   While trying to find bounds for binary operations we may run into
   cyclic dependencies between pointers.  To avoid infinite recursion,
   all walked phi nodes instantly obtain corresponding bounds, but the
   created bounds are marked as incomplete.  This lets us stop the DF
   walk during the bounds search.

   When we reach a pointer source, some args of the incomplete bounds
   phi obtain valid bounds and those values are propagated further
   through phi nodes.  If no valid bounds were found for a phi node
   then we mark its result as invalid bounds.  The process stops when
   all incomplete bounds become either valid or invalid and we are
   able to choose a pointer base.
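
   Example (a sketch; the SSA names are illustrative):

     # p_1 = PHI <p_2(2), p_3(3)>

   immediately gets a matching bounds phi marked as incomplete:

     # __bound_tmp.1_1 = PHI <__bound_tmp.1_2(2), __bound_tmp.1_3(3)>

   whose arguments are filled in as valid bounds for p_2 and p_3 are
   discovered.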

   e) Pointer is loaded from memory.

   In this case we just need to load the bounds from the bounds table.

   Example:

     foo ()
     {
       <unnamed type> __bound_tmp.3;
       static int * buf;
       int * _2;

       <bb 2>:
       _2 = buf;
       __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
       return _2, __bound_tmp.3_4;
     }

*/

typedef void (*assign_handler)(tree, tree, void *);

static tree chkp_get_zero_bounds ();
static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
                                     gimple_stmt_iterator *iter);
static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
                                                tree *elt, bool *safe,
                                                bool *bitfield,
                                                tree *bounds,
                                                gimple_stmt_iterator *iter,
                                                bool innermost_bounds);
static void chkp_parse_bit_field_ref (tree node, location_t loc,
                                      tree *offset, tree *size);

#define chkp_bndldx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
#define chkp_bndstx_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
#define chkp_checkl_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
#define chkp_checku_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
#define chkp_bndmk_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
#define chkp_ret_bnd_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
#define chkp_intersect_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
#define chkp_narrow_bounds_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
#define chkp_sizeof_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
#define chkp_extract_lower_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
#define chkp_extract_upper_fndecl \
  (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))

static GTY (()) tree chkp_uintptr_type;

static GTY (()) tree chkp_zero_bounds_var;
static GTY (()) tree chkp_none_bounds_var;

static GTY (()) basic_block entry_block;
static GTY (()) tree zero_bounds;
static GTY (()) tree none_bounds;
static GTY (()) tree incomplete_bounds;
static GTY (()) tree tmp_var;
static GTY (()) tree size_tmp_var;
static GTY (()) bitmap chkp_abnormal_copies;

struct hash_set<tree> *chkp_invalid_bounds;
struct hash_set<tree> *chkp_completed_bounds_set;
struct hash_map<tree, tree> *chkp_reg_bounds;
struct hash_map<tree, tree> *chkp_bound_vars;
struct hash_map<tree, tree> *chkp_reg_addr_bounds;
struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
struct hash_map<tree, tree> *chkp_bounds_map;
struct hash_map<tree, tree> *chkp_static_var_bounds;

static bool in_chkp_pass;

#define CHKP_BOUND_TMP_NAME "__bound_tmp"
#define CHKP_SIZE_TMP_NAME "__size_tmp"
#define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
#define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
#define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
#define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
#define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"

/* Static checker constructors may become very large and their
   compilation with optimization may take too much time.
   Therefore we put a limit on the number of statements in one
   constructor.  Tests with 100 000 statically initialized
   pointers showed the following compilation times on a Sandy
   Bridge server (with -O2):
     limit 100 => ~18 sec.
     limit 300 => ~22 sec.
     limit 1000 => ~30 sec.
     limit 3000 => ~49 sec.
     limit 5000 => ~55 sec.
     limit 10000 => ~76 sec.
     limit 100000 => ~532 sec.  */
#define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))

struct chkp_ctor_stmt_list
{
  tree stmts;
  int avail;
};

/* Return 1 if function FNDECL is instrumented by Pointer
   Bounds Checker.  */
bool
chkp_function_instrumented_p (tree fndecl)
{
  return fndecl
    && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
}

/* Mark function FNDECL as instrumented.  */
void
chkp_function_mark_instrumented (tree fndecl)
{
  if (chkp_function_instrumented_p (fndecl))
    return;

  DECL_ATTRIBUTES (fndecl)
    = tree_cons (get_identifier ("chkp instrumented"), NULL,
                 DECL_ATTRIBUTES (fndecl));
}

/* Return true when CALL is a builtin call to the instrumentation
   function corresponding to CODE.  */

bool
chkp_gimple_call_builtin_p (gimple *call,
                            enum built_in_function code)
{
  tree fndecl;
  if (gimple_call_builtin_p (call, BUILT_IN_MD)
      && (fndecl = targetm.builtin_chkp_function (code))
      && (DECL_FUNCTION_CODE (gimple_call_fndecl (call))
          == DECL_FUNCTION_CODE (fndecl)))
    return true;
  return false;
}

/* Emit code to build zero bounds and return RTL holding
   the result.  */
rtx
chkp_expand_zero_bounds ()
{
  tree zero_bnd;

  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
                                            integer_zero_node);
  return expand_normal (zero_bnd);
}

/* Emit code to store zero bounds for PTR located at MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
                                            integer_zero_node);
  bnd = make_tree (pointer_bounds_type_node,
                   assign_temp (pointer_bounds_type_node, 0, 1));
  addr = build1 (ADDR_EXPR,
                 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}

/* Build a retbnd call for returned value RETVAL.

   If BNDVAL is not NULL then the result is stored
   in it.  Otherwise a temporary is created to
   hold the returned bounds.

   GSI points to a position for the retbnd call
   and is set to the created stmt.

   The obtained bounds are returned.  */
tree
chkp_insert_retbnd_call (tree bndval, tree retval,
                         gimple_stmt_iterator *gsi)
{
  gimple *call;

  if (!bndval)
    bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");

  call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  gimple_call_set_lhs (call, bndval);
  gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);

  return bndval;
}

/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}

/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  else if (!instrumented
           && gimple_call_with_bounds_p (e->call_stmt)
           && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
           && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
           && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
        e->redirect_callee (e->callee->instrumented_version);
      else
        {
          tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
          /* Avoid bounds removal if all args will be removed.  */
          if (!args || TREE_VALUE (args) != void_type_node)
            return true;
          else
            gimple_call_set_with_bounds (e->call_stmt, false);
        }
    }

  return false;
}

/* Mark statement S to not be instrumented.  */
static void
chkp_mark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}

/* Mark statement S to be instrumented.  */
static void
chkp_unmark_stmt (gimple *s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}

/* Return 1 if statement S should not be instrumented.  */
static bool
chkp_marked_stmt_p (gimple *s)
{
  return gimple_plf (s, GF_PLF_1);
}

/* Get the var to be used for bound temps.  */
static tree
chkp_get_tmp_var (void)
{
  if (!tmp_var)
    tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);

  return tmp_var;
}

/* Get an SSA_NAME to be used as a temp.  */
static tree
chkp_get_tmp_reg (gimple *stmt)
{
  if (in_chkp_pass)
    return make_ssa_name (chkp_get_tmp_var (), stmt);

  return make_temp_ssa_name (pointer_bounds_type_node, stmt,
                             CHKP_BOUND_TMP_NAME);
}

/* Get the var to be used for size temps.  */
static tree
chkp_get_size_tmp_var (void)
{
  if (!size_tmp_var)
    size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);

  return size_tmp_var;
}

/* Register bounds BND for the address of OBJ.  */
static void
chkp_register_addr_bounds (tree obj, tree bnd)
{
  if (bnd == incomplete_bounds)
    return;

  chkp_reg_addr_bounds->put (obj, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Registered bound ");
      print_generic_expr (dump_file, bnd);
      fprintf (dump_file, " for address of ");
      print_generic_expr (dump_file, obj);
      fprintf (dump_file, "\n");
    }
}

/* Return bounds registered for the address of OBJ.  */
static tree
chkp_get_registered_addr_bounds (tree obj)
{
  tree *slot = chkp_reg_addr_bounds->get (obj);
  return slot ? *slot : NULL_TREE;
}

/* Mark BOUNDS as completed.  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " as completed\n");
    }
}

/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}

/* Clear completed bounds marks.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}

/* Mark BOUNDS associated with PTR as incomplete.  */
static void
chkp_register_incomplete_bounds (tree bounds, tree ptr)
{
  chkp_incomplete_bounds_map->put (bounds, ptr);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Registered incomplete bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " for ");
      print_generic_expr (dump_file, ptr);
      fprintf (dump_file, "\n");
    }
}

/* Return 1 if BOUNDS are incomplete and 0 otherwise.  */
static bool
chkp_incomplete_bounds (tree bounds)
{
  if (bounds == incomplete_bounds)
    return true;

  if (chkp_completed_bounds (bounds))
    return false;

  return chkp_incomplete_bounds_map->get (bounds) != NULL;
}

/* Clear incomplete bounds marks.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}

/* Build and return a bndmk call which creates bounds for the
   structure pointed to by PTR.  The structure should have a
   complete type.  */
tree
chkp_make_bounds_for_struct_addr (tree ptr)
{
  tree type = TREE_TYPE (ptr);
  tree size;

  gcc_assert (POINTER_TYPE_P (type));

  size = TYPE_SIZE (TREE_TYPE (type));

  gcc_assert (size);

  return build_call_nary (pointer_bounds_type_node,
                          build_fold_addr_expr (chkp_bndmk_fndecl),
                          2, ptr, size);
}

/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of the phi statement
   defining bounds (passed in the BOUNDS arg) is unknown.
   Traversal stops when the first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
                              bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
        {
          *res = false;
          /* Do not need to traverse further.  */
          return false;
        }
    }

  return true;
}

/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}

/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
                           void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
                   gimple_phi_arg_edge (ptr_phi, i),
                   UNKNOWN_LOCATION);
    }

  return true;
}

/* Mark BOUNDS as invalid.  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds);
      fprintf (dump_file, " as invalid\n");
    }
}

/* Return 1 if BOUNDS are valid (i.e. were not marked as invalid)
   and 0 otherwise.  */
static bool
chkp_valid_bounds (tree bounds)
{
  if (bounds == zero_bounds || bounds == none_bounds)
    return false;

  return !chkp_invalid_bounds->contains (bounds);
}

/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by the phi node are
   marked as valid completed bounds and all phi args
   are recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
        {
          *res = true;
          chkp_mark_completed_bounds (bounds);
          chkp_recompute_phi_bounds (bounds, slot, NULL);
          return true;
        }
    }

  return true;
}

/* Helper function for chkp_finish_incomplete_bounds.
   Marks all incomplete bounds as invalid.  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
                                 tree *slot ATTRIBUTE_UNUSED,
                                 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      chkp_mark_completed_bounds (bounds);
    }
  return true;
}

/* When all bounds phi nodes have all their args computed
   we have enough info to find valid bounds.  We iterate
   through all incomplete bounds searching for valid
   bounds.  Found valid bounds are marked as completed
   and all remaining incomplete bounds are recomputed.
   The process continues until no new valid bounds may be
   found.  All remaining incomplete bounds are then marked
   as invalid (i.e. they have no valid source of bounds).  */
static void
chkp_finish_incomplete_bounds (void)
{
  bool found_valid = true;

  while (found_valid)
    {
      found_valid = false;

      chkp_incomplete_bounds_map->
        traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);

      if (found_valid)
        chkp_incomplete_bounds_map->
          traverse<void *, chkp_recompute_phi_bounds> (NULL);
    }

  chkp_incomplete_bounds_map->
    traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_recompute_phi_bounds> (NULL);

  chkp_erase_completed_bounds ();
  chkp_erase_incomplete_bounds ();
}

/* Return 1 if type TYPE is a pointer type or a
   structure having a pointer type as one of its fields.
   Otherwise return 0.  */
bool
chkp_type_has_pointer (const_tree type)
{
  bool res = false;

  if (BOUNDED_TYPE_P (type))
    res = true;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          res = res || chkp_type_has_pointer (TREE_TYPE (field));
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    res = chkp_type_has_pointer (TREE_TYPE (type));

  return res;
}

/* Return the number of bound slots required for an object
   of type TYPE.  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}

/* Get bounds associated with NODE via
   chkp_set_bounds call.  */
tree
chkp_get_bounds (tree node)
{
  tree *slot;

  if (!chkp_bounds_map)
    return NULL_TREE;

  slot = chkp_bounds_map->get (node);
  return slot ? *slot : NULL_TREE;
}

/* Associate bounds VAL with NODE.  */
void
chkp_set_bounds (tree node, tree val)
{
  if (!chkp_bounds_map)
    chkp_bounds_map = new hash_map<tree, tree>;

  chkp_bounds_map->put (node, val);
}

/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added to the
   bounds initialization list then return 1.  Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (VAR_P (var));
  gcc_assert (DECL_INITIAL (var));

  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}

/* Helper function for chkp_finish_file.

   Add a new modification statement (RHS is assigned to LHS)
   to the list of static initializer statements (passed in ARG).
   If the statements list becomes too big, emit a checker
   constructor and start a new one.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
                                    tree rhs,
                                    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}

/* Build and return an ADDR_EXPR for the specified object OBJ.  */
static tree
chkp_build_addr_expr (tree obj)
{
  return TREE_CODE (obj) == TARGET_MEM_REF
    ? tree_mem_ref_addr (ptr_type_node, obj)
    : build_fold_addr_expr (obj);
}

/* Helper function for chkp_finish_file.
   Append initialization of bounds variable BND_VAR with the
   bounds of variable VAR to the statements list STMTS.  If the
   statements list becomes too big, emit a checker constructor
   and start a new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
                           struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
           && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
                     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
                     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
                              call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
        {
          tree max_size, cond;

          max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
          cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
          size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
        }

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
                                                  &stmts->stmts);
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
                                 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}

/* Return the entry block to be used for checker initialization
   code.  Create a new block if required.  */
static basic_block
chkp_get_entry_block (void)
{
  if (!entry_block)
    entry_block
      = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;

  return entry_block;
}

/* Return a bounds var to be used for pointer var PTR_VAR.  */
static tree
chkp_get_bounds_var (tree ptr_var)
{
  tree bnd_var;
  tree *slot;

  slot = chkp_bound_vars->get (ptr_var);
  if (slot)
    bnd_var = *slot;
  else
    {
      bnd_var = create_tmp_reg (pointer_bounds_type_node,
                                CHKP_BOUND_TMP_NAME);
      chkp_bound_vars->put (ptr_var, bnd_var);
    }

  return bnd_var;
}

/* If BND is an abnormal bounds copy, return the copied value.
   Otherwise return BND.  */
static tree
chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
{
  if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
      gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
      bnd = gimple_assign_rhs1 (bnd_def);
    }

  return bnd;
}

/* Register bounds BND for object PTR in the global bounds table.
   A copy of the bounds may be created for abnormal SSA names.
   Returns the bounds to use for PTR.  */
static tree
chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
{
  bool abnormal_ptr;

  if (!chkp_reg_bounds)
    return bnd;

  /* Do nothing if bounds are incomplete_bounds
     because it means bounds will be recomputed.  */
  if (bnd == incomplete_bounds)
    return bnd;

  abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
                  && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);

  /* A single bounds value may be reused multiple times for
     different pointer values.  It may cause coalescing issues
     for abnormal SSA names.  To avoid this we create a bounds
     copy in case it is computed for an abnormal SSA name.

     We also cannot reuse such created copies for other pointers.  */
  if (abnormal_ptr
      || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      tree bnd_var = NULL_TREE;

      if (abnormal_ptr)
        {
          if (SSA_NAME_VAR (ptr))
            bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
        }
      else
        bnd_var = chkp_get_tmp_var ();

      /* For abnormal copies we may just find the original
         bounds and use them.  */
      if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
        bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
      /* For undefined values we usually use the none bounds
         value, but in case of an abnormal edge it may cause
         coalescing failures.  Use the default definition of
         the bounds variable instead to avoid it.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
               && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
        {
          bnd = get_or_create_ssa_default_def (cfun, bnd_var);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Using default def bounds ");
              print_generic_expr (dump_file, bnd);
              fprintf (dump_file, " for abnormal default def SSA name ");
              print_generic_expr (dump_file, ptr);
              fprintf (dump_file, "\n");
            }
        }
      else
        {
          tree copy;
          gimple *def = SSA_NAME_DEF_STMT (ptr);
          gimple *assign;
          gimple_stmt_iterator gsi;

          if (bnd_var)
            copy = make_ssa_name (bnd_var);
          else
            copy = make_temp_ssa_name (pointer_bounds_type_node,
                                       NULL,
                                       CHKP_BOUND_TMP_NAME);
          bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
          assign = gimple_build_assign (copy, bnd);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Creating a copy of bounds ");
              print_generic_expr (dump_file, bnd);
              fprintf (dump_file, " for abnormal SSA name ");
              print_generic_expr (dump_file, ptr);
              fprintf (dump_file, "\n");
            }

          if (gimple_code (def) == GIMPLE_NOP)
            {
              gsi = gsi_last_bb (chkp_get_entry_block ());
              if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
                gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
              else
                gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
            }
          else
            {
              gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
              /* Sometimes (e.g. when we load a pointer from
                 memory) bounds are produced later than the pointer.
                 We need to insert the bounds copy appropriately.  */
              if (gimple_code (bnd_def) != GIMPLE_NOP
                  && stmt_dominates_stmt_p (def, bnd_def))
                gsi = gsi_for_stmt (bnd_def);
              else
                gsi = gsi_for_stmt (def);
              gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
            }

          bnd = copy;
        }

      if (abnormal_ptr)
        bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
    }

  chkp_reg_bounds->put (ptr, bnd);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Registered bound ");
      print_generic_expr (dump_file, bnd);
      fprintf (dump_file, " for pointer ");
      print_generic_expr (dump_file, ptr);
      fprintf (dump_file, "\n");
    }

  return bnd;
}

/* Get bounds registered for object PTR in the global bounds table.  */
static tree
chkp_get_registered_bounds (tree ptr)
{
  tree *slot;

  if (!chkp_reg_bounds)
    return NULL_TREE;

  slot = chkp_reg_bounds->get (ptr);
  return slot ? *slot : NULL_TREE;
}

/* Add bound retvals to the return statement pointed to by GSI.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  if (!retval)
    return;

  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}

/* Force OP to be suitable for use as a call argument.
   New statements (if any) go to SEQ.  */
static tree
chkp_force_gimple_call_op (tree op, gimple_seq *seq)
{
  gimple_seq stmts;
  gimple_stmt_iterator si;

  op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);

  for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
    chkp_mark_stmt (gsi_stmt (si));

  gimple_seq_add_seq (seq, stmts);

  return op;
}

/* Generate a lower bound check for memory access by ADDR.
   The check is inserted before the position pointed to by ITER.
   DIRFLAG indicates whether the memory access is a load or a store.  */
static void
chkp_check_lower (tree addr, tree bounds,
                  gimple_stmt_iterator iter,
                  location_t location,
                  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}

/* Generate an upper bound check for memory access by ADDR.
   The check is inserted before the position pointed to by ITER.
   DIRFLAG indicates whether the memory access is a load or a store.  */
static void
chkp_check_upper (tree addr, tree bounds,
                  gimple_stmt_iterator iter,
                  location_t location,
                  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}

/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed to by ITER.
   DIRFLAG indicates whether the memory access is a load
   or a store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
                       gimple_stmt_iterator iter,
                       location_t location,
                       tree dirflag)
{
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}

/* Replace the call to _bnd_chk_* pointed to by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether the
   check is for a read or a write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
                                    tree dirflag)
{
  gimple_stmt_iterator call_iter = *gsi;
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  gsi_remove (&call_iter, true);
}

/* Replace the call to _bnd_get_ptr_* pointed to by GSI with
   the corresponding bounds extract call.  */

void
chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
{
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);
  gimple *extract;

  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
    fndecl = chkp_extract_lower_fndecl;
  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
    fndecl = chkp_extract_upper_fndecl;
  else
    gcc_unreachable ();

  extract = gimple_build_call (fndecl, 1, bounds);
  gimple_call_set_lhs (extract, gimple_call_lhs (call));
  chkp_mark_stmt (extract);

  gsi_replace (gsi, extract, false);
}

/* Return a COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If the object is a TMR then we do not use COMPONENT_REF but
     add an offset instead.  We need this to be able to take the
     address of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
                                      offs, DECL_FIELD_OFFSET (field));

      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}

/* Return an ARRAY_REF for array ARR and index IDX with
   the specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
                      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If the object is a TMR then we do not use ARRAY_REF but
     add an offset instead.  We need this to be able to take the
     address of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
                                       esize, index);
      gcc_assert (esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
                                      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}

/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill the ALL_BOUNDS output array with the created bounds.

   OFFS is used for recursive calls and holds the base
   offset of TYPE in the outer structure in bits.

   ITER points to a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.
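
   E.g. for ELEM of type 'struct { int *a; int b; int *c; }' and
   OFFS 0, ALL_BOUNDS[0] and ALL_BOUNDS[2] get filled, assuming
   64-bit pointers (an illustrative target assumption).  */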
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
                           HOST_WIDE_INT offs,
                           gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      if (!all_bounds[offs / POINTER_SIZE])
        {
          tree temp = make_temp_ssa_name (type, NULL, "");
          gimple *assign = gimple_build_assign (temp, elem);
          gimple_stmt_iterator gsi;

          gsi_insert_before (iter, assign, GSI_SAME_STMT);
          gsi = gsi_for_stmt (assign);

          all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
        }
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          {
            tree base = unshare_expr (elem);
            tree field_ref = chkp_build_component_ref (base, field);
            HOST_WIDE_INT field_offs
              = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
            if (DECL_FIELD_OFFSET (field))
              field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

            chkp_find_bounds_for_elem (field_ref, all_bounds,
                                       offs + field_offs, iter);
          }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      if (!maxval || integer_minus_onep (maxval))
        return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
        {
          tree base = unshare_expr (elem);
          tree arr_elem = chkp_build_array_ref (base, etype,
                                                TYPE_SIZE (etype),
                                                cur);
          chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
                                     iter);
        }
    }
}

/* Fill the HAVE_BOUND output bitmap with information about
   the bounds required for an object of type TYPE.

   OFFS is used for recursive calls and holds the base
   offset of TYPE in the outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
                         HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          {
            HOST_WIDE_INT field_offs = 0;
            if (DECL_FIELD_BIT_OFFSET (field))
              field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
            if (DECL_FIELD_OFFSET (field))
              field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
            chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
                                     offs + field_offs);
          }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of a complete type, i.e., other
         than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      if (!maxval
          || TREE_CODE (maxval) != INTEGER_CST
          || integer_minus_onep (maxval))
        return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
        chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}

/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}

/* Return 1 if a call to FNDECL should be instrumented
   and 0 otherwise.  */

static bool
chkp_instrument_normal_builtin (tree fndecl)
{
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STPCPY:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_BZERO:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_MALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_REALLOC:
      return 1;

    default:
      return 0;
    }
}
1742
1743 /* Add bound arguments to call statement pointed by GSI.
1744 Also performs a replacement of user checker builtins calls
1745 with internal ones. */
1746
1747 static void
1748 chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
1749 {
1750 gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
1751 unsigned arg_no = 0;
1752 tree fndecl = gimple_call_fndecl (call);
1753 tree fntype;
1754 tree first_formal_arg;
1755 tree arg;
1756 bool use_fntype = false;
1757 tree op;
1758 ssa_op_iter iter;
1759 gcall *new_call;
1760
1761 /* Do nothing for internal functions. */
1762 if (gimple_call_internal_p (call))
1763 return;
1764
1765 fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1766
1767 /* Do nothing if back-end builtin is called. */
1768 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
1769 return;
1770
1771 /* Do nothing for some middle-end builtins. */
1772 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1773 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
1774 return;
1775
1776 /* Do nothing for calls to not instrumentable functions. */
1777 if (fndecl && !chkp_instrumentable_p (fndecl))
1778 return;
1779
1780 /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
1781 and CHKP_COPY_PTR_BOUNDS. */
1782 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1783 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
1784 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
1785 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
1786 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
1787 return;
1788
1789 /* Check user builtins are replaced with checks. */
1790 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1791 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1792 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
1793 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
1794 {
1795 chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
1796 return;
1797 }
1798
1799 /* Check user builtins are replaced with bound extract. */
1800 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1801 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
1802 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
1803 {
1804 chkp_replace_extract_builtin (gsi);
1805 return;
1806 }
1807
1808 /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
1809 target narrow bounds call. */
1810 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1811 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
1812 {
1813 tree arg = gimple_call_arg (call, 1);
1814 tree bounds = chkp_find_bounds (arg, gsi);
1815
1816 gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
1817 gimple_call_set_arg (call, 1, bounds);
1818 update_stmt (call);
1819
1820 return;
1821 }
1822
1823 /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
1824 bndstx call. */
1825 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1826 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
1827 {
1828 tree addr = gimple_call_arg (call, 0);
1829 tree ptr = gimple_call_arg (call, 1);
1830 tree bounds = chkp_find_bounds (ptr, gsi);
1831 gimple_stmt_iterator iter = gsi_for_stmt (call);
1832
1833 chkp_build_bndstx (addr, ptr, bounds, gsi);
1834 gsi_remove (&iter, true);
1835
1836 return;
1837 }
1838
1839 if (!flag_chkp_instrument_calls)
1840 return;
1841
1842 /* We instrument only some subset of builtins. We also instrument
1843 builtin calls to be inlined. */
1844 if (fndecl
1845 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1846 && !chkp_instrument_normal_builtin (fndecl))
1847 {
1848 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
1849 return;
1850
1851 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
1852 if (!clone
1853 || !gimple_has_body_p (clone->decl))
1854 return;
1855 }
1856
1857 /* If function decl is available then use it for
1858 formal arguments list. Otherwise use function type. */
1859 if (fndecl
1860 && DECL_ARGUMENTS (fndecl)
1861 && gimple_call_fntype (call) == TREE_TYPE (fndecl))
1862 first_formal_arg = DECL_ARGUMENTS (fndecl);
1863 else
1864 {
1865 first_formal_arg = TYPE_ARG_TYPES (fntype);
1866 use_fntype = true;
1867 }
1868
1869 /* Fill vector of new call args. */
1870 vec<tree> new_args = vNULL;
1871 new_args.create (gimple_call_num_args (call));
1872 arg = first_formal_arg;
1873 for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
1874 {
1875 tree call_arg = gimple_call_arg (call, arg_no);
1876 tree type;
1877
1878 /* Get arg type using formal argument description
1879 or actual argument type. */
1880 if (arg)
1881 if (use_fntype)
1882 if (TREE_VALUE (arg) != void_type_node)
1883 {
1884 type = TREE_VALUE (arg);
1885 arg = TREE_CHAIN (arg);
1886 }
1887 else
1888 type = TREE_TYPE (call_arg);
1889 else
1890 {
1891 type = TREE_TYPE (arg);
1892 arg = TREE_CHAIN (arg);
1893 }
1894 else
1895 type = TREE_TYPE (call_arg);
1896
1897 new_args.safe_push (call_arg);
1898
1899 if (BOUNDED_TYPE_P (type)
1900 || pass_by_reference (NULL, TYPE_MODE (type), type, true))
1901 new_args.safe_push (chkp_find_bounds (call_arg, gsi));
1902 else if (chkp_type_has_pointer (type))
1903 {
1904 HOST_WIDE_INT max_bounds
1905 = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
1906 tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
1907 HOST_WIDE_INT bnd_no;
1908
1909 memset (all_bounds, 0, sizeof (tree) * max_bounds);
1910
1911 chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);
1912
1913 for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
1914 if (all_bounds[bnd_no])
1915 new_args.safe_push (all_bounds[bnd_no]);
1916
1917 free (all_bounds);
1918 }
1919 }
1920
1921 if (new_args.length () == gimple_call_num_args (call))
1922 new_call = call;
1923 else
1924 {
1925 new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
1926 gimple_call_set_lhs (new_call, gimple_call_lhs (call));
1927 gimple_call_copy_flags (new_call, call);
1928 gimple_call_set_chain (new_call, gimple_call_chain (call));
1929 }
1930 new_args.release ();
1931
1932 /* For direct calls fndecl is replaced with the instrumented version. */
1933 if (fndecl)
1934 {
1935 tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
1936 gimple_call_set_fndecl (new_call, new_decl);
1937 /* In case of a type cast we should modify used function
1938 type instead of using type of new fndecl. */
1939 if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
1940 {
1941 tree type = gimple_call_fntype (call);
1942 type = chkp_copy_function_type_adding_bounds (type);
1943 gimple_call_set_fntype (new_call, type);
1944 }
1945 else
1946 gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
1947 }
1948 /* For an indirect call we should fix the function pointer type if
1949 we pass some bounds. */
1950 else if (new_call != call)
1951 {
1952 tree type = gimple_call_fntype (call);
1953 type = chkp_copy_function_type_adding_bounds (type);
1954 gimple_call_set_fntype (new_call, type);
1955 }
1956
1957 /* Replace the old call statement with the new one. */
1958 if (call != new_call)
1959 {
1960 FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
1961 {
1962 SSA_NAME_DEF_STMT (op) = new_call;
1963 }
1964 gsi_replace (gsi, new_call, true);
1965 }
1966 else
1967 update_stmt (new_call);
1968
1969 gimple_call_set_with_bounds (new_call, true);
1970 }
1971
1972 /* Return a constant static bounds var with specified bounds LB and UB.
1973 If such a var does not exist then a new var is created with the given NAME. */
1974 static tree
1975 chkp_make_static_const_bounds (HOST_WIDE_INT lb,
1976 HOST_WIDE_INT ub,
1977 const char *name)
1978 {
1979 tree id = get_identifier (name);
1980 tree var;
1981 varpool_node *node;
1982 symtab_node *snode;
1983
1984 var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
1985 pointer_bounds_type_node);
1986 TREE_STATIC (var) = 1;
1987 TREE_PUBLIC (var) = 1;
1988
1989 /* With LTO we may have constant bounds already in varpool.
1990 Try to find it. */
1991 if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
1992 {
1993 /* We don't allow this symbol to be used for anything but bounds. */
1994 if (snode->type != SYMTAB_VARIABLE
1995 || !POINTER_BOUNDS_P (snode->decl))
1996 sorry ("-fcheck-pointer-bounds requires %qs "
1997 "name for internal usage",
1998 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));
1999
2000 return snode->decl;
2001 }
2002
2003 TREE_USED (var) = 1;
2004 TREE_READONLY (var) = 1;
2005 TREE_ADDRESSABLE (var) = 0;
2006 DECL_ARTIFICIAL (var) = 1;
2007 DECL_READ_P (var) = 1;
2008 DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
2009 make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
2010 /* We may use this symbol during ctors generation in chkp_finish_file
2011 when all symbols are emitted. Force output to avoid undefined
2012 symbols in ctors. */
2013 node = varpool_node::get_create (var);
2014 node->force_output = 1;
2015
2016 varpool_node::finalize_decl (var);
2017
2018 return var;
2019 }
2020
2021 /* Generate code to make bounds with specified lower bound LB and SIZE.
2022 If AFTER is 1 then code is inserted after the position pointed to by ITER,
2023 otherwise code is inserted before the position pointed to by ITER.
2024 If ITER is NULL then code is added to the entry block. */
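/* A sketch of the emitted code (SSA names are arbitrary): for
   LB == &buf and SIZE == 10 the sequence built here is roughly

     __bound_tmp.0_1 = __builtin___chkp_bndmk (&buf, 10);

   with __bound_tmp.0_1 returned as the bounds value.  */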
2025 static tree
2026 chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
2027 {
2028 gimple_seq seq;
2029 gimple_stmt_iterator gsi;
2030 gimple *stmt;
2031 tree bounds;
2032
2033 if (iter)
2034 gsi = *iter;
2035 else
2036 gsi = gsi_start_bb (chkp_get_entry_block ());
2037
2038 seq = NULL;
2039
2040 lb = chkp_force_gimple_call_op (lb, &seq);
2041 size = chkp_force_gimple_call_op (size, &seq);
2042
2043 stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
2044 chkp_mark_stmt (stmt);
2045
2046 bounds = chkp_get_tmp_reg (stmt);
2047 gimple_call_set_lhs (stmt, bounds);
2048
2049 gimple_seq_add_stmt (&seq, stmt);
2050
2051 if (iter && after)
2052 gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
2053 else
2054 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2055
2056 if (dump_file && (dump_flags & TDF_DETAILS))
2057 {
2058 fprintf (dump_file, "Made bounds: ");
2059 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2060 if (iter)
2061 {
2062 fprintf (dump_file, " inserted before statement: ");
2063 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
2064 }
2065 else
2066 fprintf (dump_file, " at function entry\n");
2067 }
2068
2069 /* update_stmt (stmt); */
2070
2071 return bounds;
2072 }
2073
2074 /* Return var holding zero bounds. */
2075 tree
2076 chkp_get_zero_bounds_var (void)
2077 {
2078 if (!chkp_zero_bounds_var)
2079 chkp_zero_bounds_var
2080 = chkp_make_static_const_bounds (0, -1,
2081 CHKP_ZERO_BOUNDS_VAR_NAME);
2082 return chkp_zero_bounds_var;
2083 }
2084
2085 /* Return var holding none bounds. */
2086 tree
2087 chkp_get_none_bounds_var (void)
2088 {
2089 if (!chkp_none_bounds_var)
2090 chkp_none_bounds_var
2091 = chkp_make_static_const_bounds (-1, 0,
2092 CHKP_NONE_BOUNDS_VAR_NAME);
2093 return chkp_none_bounds_var;
2094 }
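
/* Note the encodings used by the two vars above: zero bounds are
   created as [0, -1] (lower bound 0, upper bound -1, i.e. the maximum
   address), which allow any access, while none bounds are created as
   [-1, 0], which no access can satisfy.  */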
2095
2096 /* Return SSA_NAME used to represent zero bounds. */
2097 static tree
2098 chkp_get_zero_bounds (void)
2099 {
2100 if (zero_bounds)
2101 return zero_bounds;
2102
2103 if (dump_file && (dump_flags & TDF_DETAILS))
2104 fprintf (dump_file, "Creating zero bounds...");
2105
2106 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2107 || flag_chkp_use_static_const_bounds > 0)
2108 {
2109 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2110 gimple *stmt;
2111
2112 zero_bounds = chkp_get_tmp_reg (NULL);
2113 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2114 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2115 }
2116 else
2117 zero_bounds = chkp_make_bounds (integer_zero_node,
2118 integer_zero_node,
2119 NULL,
2120 false);
2121
2122 return zero_bounds;
2123 }
2124
2125 /* Return SSA_NAME used to represent none bounds. */
2126 static tree
2127 chkp_get_none_bounds (void)
2128 {
2129 if (none_bounds)
2130 return none_bounds;
2131
2132 if (dump_file && (dump_flags & TDF_DETAILS))
2133 fprintf (dump_file, "Creating none bounds...");
2134
2135
2136 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2137 || flag_chkp_use_static_const_bounds > 0)
2138 {
2139 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2140 gimple *stmt;
2141
2142 none_bounds = chkp_get_tmp_reg (NULL);
2143 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2144 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2145 }
2146 else
2147 none_bounds = chkp_make_bounds (integer_minus_one_node,
2148 build_int_cst (size_type_node, 2),
2149 NULL,
2150 false);
2151
2152 return none_bounds;
2153 }
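
/* In the dynamic case above none bounds are built via bndmk with
   LB == -1 and SIZE == 2: assuming bndmk produces [LB, LB + SIZE - 1],
   this yields the same [-1, 0] encoding as the static var.  */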
2154
2155 /* Return bounds to be used as the result of an operation which
2156 should not create a pointer (e.g. MULT_EXPR). */
2157 static tree
2158 chkp_get_invalid_op_bounds (void)
2159 {
2160 return chkp_get_zero_bounds ();
2161 }
2162
2163 /* Return bounds to be used for loads of non-pointer values. */
2164 static tree
2165 chkp_get_nonpointer_load_bounds (void)
2166 {
2167 return chkp_get_zero_bounds ();
2168 }
2169
2170 /* Return 1 if we may use a bndret call to get bounds for the pointer
2171 returned by CALL. */
2172 static bool
2173 chkp_call_returns_bounds_p (gcall *call)
2174 {
2175 if (gimple_call_internal_p (call))
2176 {
2177 if (gimple_call_internal_fn (call) == IFN_VA_ARG)
2178 return true;
2179 return false;
2180 }
2181
2182 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2183 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2184 return true;
2185
2186 if (gimple_call_with_bounds_p (call))
2187 return true;
2188
2189 tree fndecl = gimple_call_fndecl (call);
2190
2191 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2192 return false;
2193
2194 if (fndecl && !chkp_instrumentable_p (fndecl))
2195 return false;
2196
2197 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2198 {
2199 if (chkp_instrument_normal_builtin (fndecl))
2200 return true;
2201
2202 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2203 return false;
2204
2205 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2206 return (clone && gimple_has_body_p (clone->decl));
2207 }
2208
2209 return true;
2210 }
2211
2212 /* Build bounds returned by CALL. */
2213 static tree
2214 chkp_build_returned_bound (gcall *call)
2215 {
2216 gimple_stmt_iterator gsi;
2217 tree bounds;
2218 gimple *stmt;
2219 tree fndecl = gimple_call_fndecl (call);
2220 unsigned int retflags;
2221 tree lhs = gimple_call_lhs (call);
2222
2223 /* To avoid fixing alloca expansion in targets we handle
2224 it separately. */
2225 if (fndecl
2226 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2227 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
2228 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2229 {
2230 tree size = gimple_call_arg (call, 0);
2231 gimple_stmt_iterator iter = gsi_for_stmt (call);
2232 bounds = chkp_make_bounds (lhs, size, &iter, true);
2233 }
2234 /* We know bounds returned by set_bounds builtin call. */
2235 else if (fndecl
2236 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2237 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
2238 {
2239 tree lb = gimple_call_arg (call, 0);
2240 tree size = gimple_call_arg (call, 1);
2241 gimple_stmt_iterator iter = gsi_for_stmt (call);
2242 bounds = chkp_make_bounds (lb, size, &iter, true);
2243 }
2244 /* Detect bounds initialization calls. */
2245 else if (fndecl
2246 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2247 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
2248 bounds = chkp_get_zero_bounds ();
2249 /* Detect bounds nullification calls. */
2250 else if (fndecl
2251 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2252 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
2253 bounds = chkp_get_none_bounds ();
2254 /* Detect bounds copy calls. */
2255 else if (fndecl
2256 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2257 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
2258 {
2259 gimple_stmt_iterator iter = gsi_for_stmt (call);
2260 bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
2261 }
2262 /* Do not use retbnd when the returned bounds are equal to some
2263 of the passed bounds. */
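  /* Illustrative example (bounds arg names are arbitrary): memcpy
     returns its first argument, which gimple_call_return_flags
     reports via ERF_RETURNS_ARG.  For an instrumented call
     memcpy.chkp (dst, __bnd_dst, src, __bnd_src, n) the loop below
     skips the bounds args so that the recorded argument number 0
     still maps to dst.  */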
2264 else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
2265 && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
2266 {
2267 gimple_stmt_iterator iter = gsi_for_stmt (call);
2268 unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
2269 if (gimple_call_with_bounds_p (call))
2270 {
2271 for (argno = 0; argno < gimple_call_num_args (call); argno++)
2272 if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
2273 {
2274 if (retarg)
2275 retarg--;
2276 else
2277 break;
2278 }
2279 }
2280 else
2281 argno = retarg;
2282
2283 bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
2284 }
2285 else if (chkp_call_returns_bounds_p (call)
2286 && BOUNDED_P (lhs))
2287 {
2288 gcc_assert (TREE_CODE (lhs) == SSA_NAME);
2289
2290 /* In general case build checker builtin call to
2291 obtain returned bounds. */
2292 stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
2293 gimple_call_lhs (call));
2294 chkp_mark_stmt (stmt);
2295
2296 gsi = gsi_for_stmt (call);
2297 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
2298
2299 bounds = chkp_get_tmp_reg (stmt);
2300 gimple_call_set_lhs (stmt, bounds);
2301
2302 update_stmt (stmt);
2303 }
2304 else
2305 bounds = chkp_get_zero_bounds ();
2306
2307 if (dump_file && (dump_flags & TDF_DETAILS))
2308 {
2309 fprintf (dump_file, "Built returned bounds (");
2310 print_generic_expr (dump_file, bounds);
2311 fprintf (dump_file, ") for call: ");
2312 print_gimple_stmt (dump_file, call, 0, TDF_VOPS | TDF_MEMSYMS);
2313 }
2314
2315 bounds = chkp_maybe_copy_and_register_bounds (lhs, bounds);
2316
2317 return bounds;
2318 }
2319
2320 /* Return the retbnd call used to obtain bounds for the pointer
2321 produced as SSA name VAL by a call statement, or NULL. */
2322 gcall *
2323 chkp_retbnd_call_by_val (tree val)
2324 {
2325 if (TREE_CODE (val) != SSA_NAME)
2326 return NULL;
2327
2328 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2329
2330 imm_use_iterator use_iter;
2331 use_operand_p use_p;
2332 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2333 if (chkp_gimple_call_builtin_p (USE_STMT (use_p), BUILT_IN_CHKP_BNDRET))
2334 return as_a <gcall *> (USE_STMT (use_p));
2335
2336 return NULL;
2337 }
2338
2339 /* Check that the parameter following the given PARM is a bounds parm
2340 and return its default SSA_NAME (created if required). */
2341 static tree
2342 chkp_get_next_bounds_parm (tree parm)
2343 {
2344 tree bounds = TREE_CHAIN (parm);
2345 gcc_assert (POINTER_BOUNDS_P (bounds));
2346 bounds = ssa_default_def (cfun, bounds);
2347 if (!bounds)
2348 {
2349 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2350 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2351 }
2352 return bounds;
2353 }
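
/* Note: in an instrumented clone each pointer parm is immediately
   followed by its bounds parm in the DECL_ARGUMENTS chain (see
   ipa-chkp.c), which is what the assert above relies on.  */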
2354
2355 /* Return bounds to be used for input argument PARM. */
2356 static tree
2357 chkp_get_bound_for_parm (tree parm)
2358 {
2359 tree decl = SSA_NAME_VAR (parm);
2360 tree bounds;
2361
2362 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2363
2364 bounds = chkp_get_registered_bounds (parm);
2365
2366 if (!bounds)
2367 bounds = chkp_get_registered_bounds (decl);
2368
2369 if (!bounds)
2370 {
2371 tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;
2372
2373 /* For static chain param we return zero bounds
2374 because currently we do not check dereferences
2375 of this pointer. */
2376 if (cfun->static_chain_decl == decl)
2377 bounds = chkp_get_zero_bounds ();
2378 /* If a non-instrumented runtime is used then it may be useful
2379 to use zero bounds for input arguments of the main
2380 function. */
2381 else if (flag_chkp_zero_input_bounds_for_main
2382 && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
2383 "main") == 0)
2384 bounds = chkp_get_zero_bounds ();
2385 else if (BOUNDED_P (parm))
2386 {
2387 bounds = chkp_get_next_bounds_parm (decl);
2388 bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);
2389
2390 if (dump_file && (dump_flags & TDF_DETAILS))
2391 {
2392 fprintf (dump_file, "Built arg bounds (");
2393 print_generic_expr (dump_file, bounds);
2394 fprintf (dump_file, ") for arg: ");
2395 print_node (dump_file, "", decl, 0);
2396 }
2397 }
2398 else
2399 bounds = chkp_get_zero_bounds ();
2400 }
2401
2402 if (!chkp_get_registered_bounds (parm))
2403 bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);
2404
2405 if (dump_file && (dump_flags & TDF_DETAILS))
2406 {
2407 fprintf (dump_file, "Using bounds ");
2408 print_generic_expr (dump_file, bounds);
2409 fprintf (dump_file, " for parm ");
2410 print_generic_expr (dump_file, parm);
2411 fprintf (dump_file, " of type ");
2412 print_generic_expr (dump_file, TREE_TYPE (parm));
2413 fprintf (dump_file, ".\n");
2414 }
2415
2416 return bounds;
2417 }
2418
2419 /* Build and return CALL_EXPR for the bndldx builtin with specified
2420 arguments. */
2421 tree
2422 chkp_build_bndldx_call (tree addr, tree ptr)
2423 {
2424 tree fn = build1 (ADDR_EXPR,
2425 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2426 chkp_bndldx_fndecl);
2427 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2428 fn, 2, addr, ptr);
2429 CALL_WITH_BOUNDS_P (call) = true;
2430 return call;
2431 }
2432
2433 /* Insert code to load bounds for PTR located at ADDR.
2434 Code is inserted after the position pointed to by GSI.
2435 Loaded bounds are returned. */
2436 static tree
2437 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2438 {
2439 gimple_seq seq;
2440 gimple *stmt;
2441 tree bounds;
2442
2443 seq = NULL;
2444
2445 addr = chkp_force_gimple_call_op (addr, &seq);
2446 ptr = chkp_force_gimple_call_op (ptr, &seq);
2447
2448 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2449 chkp_mark_stmt (stmt);
2450 bounds = chkp_get_tmp_reg (stmt);
2451 gimple_call_set_lhs (stmt, bounds);
2452
2453 gimple_seq_add_stmt (&seq, stmt);
2454
2455 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2456
2457 if (dump_file && (dump_flags & TDF_DETAILS))
2458 {
2459 fprintf (dump_file, "Generated bndldx for pointer ");
2460 print_generic_expr (dump_file, ptr);
2461 fprintf (dump_file, ": ");
2462 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS | TDF_MEMSYMS);
2463 }
2464
2465 return bounds;
2466 }
2467
2468 /* Build and return CALL_EXPR for bndstx builtin with specified
2469 arguments. */
2470 tree
2471 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2472 {
2473 tree fn = build1 (ADDR_EXPR,
2474 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2475 chkp_bndstx_fndecl);
2476 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2477 fn, 3, ptr, bounds, addr);
2478 CALL_WITH_BOUNDS_P (call) = true;
2479 return call;
2480 }
2481
2482 /* Insert code to store BOUNDS for PTR stored at ADDR.
2483 New statements are inserted after the position pointed
2484 to by GSI. */
2485 void
2486 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2487 gimple_stmt_iterator *gsi)
2488 {
2489 gimple_seq seq;
2490 gimple *stmt;
2491
2492 seq = NULL;
2493
2494 addr = chkp_force_gimple_call_op (addr, &seq);
2495 ptr = chkp_force_gimple_call_op (ptr, &seq);
2496
2497 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2498 chkp_mark_stmt (stmt);
2499 gimple_call_set_with_bounds (stmt, true);
2500
2501 gimple_seq_add_stmt (&seq, stmt);
2502
2503 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2504
2505 if (dump_file && (dump_flags & TDF_DETAILS))
2506 {
2507 fprintf (dump_file, "Generated bndstx for pointer store ");
2508 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2509 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2510 }
2511 }
2512
2513 /* This function is called when a call statement
2514 is inlined and therefore we can't use bndret
2515 for its LHS anymore. The function fixes the bndret
2516 call using the new RHS value if possible. */
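/* Illustrative example (SSA names are arbitrary): suppose inlining
   replaced

     x_1 = foo ();

   with a load x_1 = s.field.  The statement

     __bound_tmp.0_2 = __builtin___chkp_bndret (x_1);

   no longer follows a call, so below it is replaced by a bounds
   load from the bounds table:

     __bound_tmp.0_2 = __builtin___chkp_bndldx (&s.field, x_1);  */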
2517 void
2518 chkp_fixup_inlined_call (tree lhs, tree rhs)
2519 {
2520 tree addr, bounds;
2521 gcall *retbnd, *bndldx;
2522
2523 if (!BOUNDED_P (lhs))
2524 return;
2525
2526 /* Search for retbnd call. */
2527 retbnd = chkp_retbnd_call_by_val (lhs);
2528 if (!retbnd)
2529 return;
2530
2531 /* We currently handle only the case when the call is replaced
2532 with a memory access. In this case the bndret call
2533 may be replaced with a bndldx call. Otherwise we
2534 would have to search for bounds, which may give a wrong
2535 result due to various optimizations applied. */
2536 switch (TREE_CODE (rhs))
2537 {
2538 case VAR_DECL:
2539 if (DECL_REGISTER (rhs))
2540 return;
2541 break;
2542
2543 case MEM_REF:
2544 break;
2545
2546 case ARRAY_REF:
2547 case COMPONENT_REF:
2548 addr = get_base_address (rhs);
2549 if (!DECL_P (addr)
2550 && TREE_CODE (addr) != MEM_REF)
2551 return;
2552 if (DECL_P (addr) && DECL_REGISTER (addr))
2553 return;
2554 break;
2555
2556 default:
2557 return;
2558 }
2559
2560 /* Create a new statement sequence with a bndldx call. */
2561 gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
2562 addr = build_fold_addr_expr (rhs);
2563 chkp_build_bndldx (addr, lhs, &gsi);
2564 bndldx = as_a <gcall *> (gsi_stmt (gsi));
2565
2566 /* Remove bndret call. */
2567 bounds = gimple_call_lhs (retbnd);
2568 gsi = gsi_for_stmt (retbnd);
2569 gsi_remove (&gsi, true);
2570
2571 /* Link new bndldx call. */
2572 gimple_call_set_lhs (bndldx, bounds);
2573 update_stmt (bndldx);
2574 }
2575
2576 /* Compute bounds for pointer NODE which was assigned in
2577 assignment statement ASSIGN. Return computed bounds. */
2578 static tree
2579 chkp_compute_bounds_for_assignment (tree node, gimple *assign)
2580 {
2581 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
2582 tree rhs1 = gimple_assign_rhs1 (assign);
2583 tree bounds = NULL_TREE;
2584 gimple_stmt_iterator iter = gsi_for_stmt (assign);
2585 tree base = NULL;
2586
2587 if (dump_file && (dump_flags & TDF_DETAILS))
2588 {
2589 fprintf (dump_file, "Computing bounds for assignment: ");
2590 print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
2591 }
2592
2593 switch (rhs_code)
2594 {
2595 case MEM_REF:
2596 case TARGET_MEM_REF:
2597 case COMPONENT_REF:
2598 case ARRAY_REF:
2599 /* We need to load bounds from the bounds table. */
2600 bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
2601 break;
2602
2603 case VAR_DECL:
2604 case SSA_NAME:
2605 case ADDR_EXPR:
2606 case POINTER_PLUS_EXPR:
2607 case NOP_EXPR:
2608 case CONVERT_EXPR:
2609 case INTEGER_CST:
2610 /* Bounds are just propagated from RHS. */
2611 bounds = chkp_find_bounds (rhs1, &iter);
2612 base = rhs1;
2613 break;
2614
2615 case VIEW_CONVERT_EXPR:
2616 /* Bounds are just propagated from RHS. */
2617 bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
2618 break;
2619
2620 case PARM_DECL:
2621 if (BOUNDED_P (rhs1))
2622 {
2623 /* We need to load bounds from the bounds table. */
2624 bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
2625 node, &iter);
2626 TREE_ADDRESSABLE (rhs1) = 1;
2627 }
2628 else
2629 bounds = chkp_get_nonpointer_load_bounds ();
2630 break;
2631
2632 case MINUS_EXPR:
2633 case PLUS_EXPR:
2634 case BIT_AND_EXPR:
2635 case BIT_IOR_EXPR:
2636 case BIT_XOR_EXPR:
2637 {
2638 tree rhs2 = gimple_assign_rhs2 (assign);
2639 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2640 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2641
2642 /* First we try to check the types of the operands. If that
2643 does not help then look at the bounds values.
2644
2645 If some bounds are incomplete and others are
2646 not proven to be valid (i.e. also incomplete
2647 or invalid because the value is not a pointer) then
2648 the resulting value is incomplete and will be
2649 recomputed later in chkp_finish_incomplete_bounds. */
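	/* E.g. for q_2 = p_1 + i_3, where only rhs1 is a pointer,
	   the first case below reuses the bounds of p_1.  For a
	   pointer difference p_1 - q_2 both operands have valid
	   bounds, so the default invalid op bounds are used
	   (the last case).  */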
2650 if (BOUNDED_P (rhs1)
2651 && !BOUNDED_P (rhs2))
2652 bounds = bnd1;
2653 else if (BOUNDED_P (rhs2)
2654 && !BOUNDED_P (rhs1)
2655 && rhs_code != MINUS_EXPR)
2656 bounds = bnd2;
2657 else if (chkp_incomplete_bounds (bnd1))
2658 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
2659 && !chkp_incomplete_bounds (bnd2))
2660 bounds = bnd2;
2661 else
2662 bounds = incomplete_bounds;
2663 else if (chkp_incomplete_bounds (bnd2))
2664 if (chkp_valid_bounds (bnd1)
2665 && !chkp_incomplete_bounds (bnd1))
2666 bounds = bnd1;
2667 else
2668 bounds = incomplete_bounds;
2669 else if (!chkp_valid_bounds (bnd1))
2670 if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
2671 bounds = bnd2;
2672 else if (bnd2 == chkp_get_zero_bounds ())
2673 bounds = bnd2;
2674 else
2675 bounds = bnd1;
2676 else if (!chkp_valid_bounds (bnd2))
2677 bounds = bnd1;
2678 else
2679 /* It seems both operands may have valid bounds
2680 (e.g. pointer minus pointer). In such a case
2681 use the default invalid op bounds. */
2682 bounds = chkp_get_invalid_op_bounds ();
2683
2684 base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
2685 }
2686 break;
2687
2688 case BIT_NOT_EXPR:
2689 case NEGATE_EXPR:
2690 case LSHIFT_EXPR:
2691 case RSHIFT_EXPR:
2692 case LROTATE_EXPR:
2693 case RROTATE_EXPR:
2694 case EQ_EXPR:
2695 case NE_EXPR:
2696 case LT_EXPR:
2697 case LE_EXPR:
2698 case GT_EXPR:
2699 case GE_EXPR:
2700 case MULT_EXPR:
2701 case RDIV_EXPR:
2702 case TRUNC_DIV_EXPR:
2703 case FLOOR_DIV_EXPR:
2704 case CEIL_DIV_EXPR:
2705 case ROUND_DIV_EXPR:
2706 case TRUNC_MOD_EXPR:
2707 case FLOOR_MOD_EXPR:
2708 case CEIL_MOD_EXPR:
2709 case ROUND_MOD_EXPR:
2710 case EXACT_DIV_EXPR:
2711 case FIX_TRUNC_EXPR:
2712 case FLOAT_EXPR:
2713 case REALPART_EXPR:
2714 case IMAGPART_EXPR:
2715 /* No valid bounds may be produced by these exprs. */
2716 bounds = chkp_get_invalid_op_bounds ();
2717 break;
2718
2719 case COND_EXPR:
2720 {
2721 tree val1 = gimple_assign_rhs2 (assign);
2722 tree val2 = gimple_assign_rhs3 (assign);
2723 tree bnd1 = chkp_find_bounds (val1, &iter);
2724 tree bnd2 = chkp_find_bounds (val2, &iter);
2725 gimple *stmt;
2726
2727 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2728 bounds = incomplete_bounds;
2729 else if (bnd1 == bnd2)
2730 bounds = bnd1;
2731 else
2732 {
2733 rhs1 = unshare_expr (rhs1);
2734
2735 bounds = chkp_get_tmp_reg (assign);
2736 stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
2737 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2738
2739 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2740 chkp_mark_invalid_bounds (bounds);
2741 }
2742 }
2743 break;
2744
2745 case MAX_EXPR:
2746 case MIN_EXPR:
2747 {
2748 tree rhs2 = gimple_assign_rhs2 (assign);
2749 tree bnd1 = chkp_find_bounds (rhs1, &iter);
2750 tree bnd2 = chkp_find_bounds (rhs2, &iter);
2751
2752 if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
2753 bounds = incomplete_bounds;
2754 else if (bnd1 == bnd2)
2755 bounds = bnd1;
2756 else
2757 {
2758 gimple *stmt;
2759 tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
2760 boolean_type_node, rhs1, rhs2);
2761 bounds = chkp_get_tmp_reg (assign);
2762 stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);
2763
2764 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2765
2766 if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
2767 chkp_mark_invalid_bounds (bounds);
2768 }
2769 }
2770 break;
2771
2772 default:
2773 bounds = chkp_get_zero_bounds ();
2774 warning (0, "pointer bounds were lost due to unexpected expression %s",
2775 get_tree_code_name (rhs_code));
2776 }
2777
2778 gcc_assert (bounds);
2779
2780 /* We may reuse bounds of other pointer we copy/modify. But it is not
2781 allowed for abnormal ssa names. If we produced a pointer using
2782 abnormal ssa name, we better make a bounds copy to avoid coalescing
2783 issues. */
2784 if (base
2785 && TREE_CODE (base) == SSA_NAME
2786 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
2787 {
2788 gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
2789 gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
2790 bounds = gimple_assign_lhs (stmt);
2791 }
2792
2793 if (node)
2794 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2795
2796 return bounds;
2797 }
2798
2799 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2800
2801 There are just a few statement codes allowed: NOP (for default ssa names),
2802 ASSIGN, CALL, PHI, ASM.
2803
2804 Return computed bounds. */
2805 static tree
2806 chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
2807 gphi_iterator *iter)
2808 {
2809 tree var, bounds;
2810 enum gimple_code code = gimple_code (def_stmt);
2811 gphi *stmt;
2812
2813 if (dump_file && (dump_flags & TDF_DETAILS))
2814 {
2815 fprintf (dump_file, "Searching for bounds for node: ");
2816 print_generic_expr (dump_file, node);
2817
2818 fprintf (dump_file, " using its definition: ");
2819 print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS | TDF_MEMSYMS);
2820 }
2821
2822 switch (code)
2823 {
2824 case GIMPLE_NOP:
2825 var = SSA_NAME_VAR (node);
2826 switch (TREE_CODE (var))
2827 {
2828 case PARM_DECL:
2829 bounds = chkp_get_bound_for_parm (node);
2830 break;
2831
2832 case VAR_DECL:
2833 /* For uninitialized pointers use none bounds. */
2834 bounds = chkp_get_none_bounds ();
2835 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2836 break;
2837
2838 case RESULT_DECL:
2839 {
2840 tree base_type;
2841
2842 gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);
2843
2844 base_type = TREE_TYPE (TREE_TYPE (node));
2845
2846 gcc_assert (TYPE_SIZE (base_type)
2847 && TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
2848 && tree_to_uhwi (TYPE_SIZE (base_type)) != 0);
2849
2850 bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
2851 NULL, false);
2852 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2853 }
2854 break;
2855
2856 default:
2857 if (dump_file && (dump_flags & TDF_DETAILS))
2858 {
2859 fprintf (dump_file, "Unexpected var with no definition\n");
2860 print_generic_expr (dump_file, var);
2861 }
2862 internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
2863 get_tree_code_name (TREE_CODE (var)));
2864 }
2865 break;
2866
2867 case GIMPLE_ASSIGN:
2868 bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
2869 break;
2870
2871 case GIMPLE_CALL:
2872 bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
2873 break;
2874
2875 case GIMPLE_PHI:
2876 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
2877 if (SSA_NAME_VAR (node))
2878 var = chkp_get_bounds_var (SSA_NAME_VAR (node));
2879 else
2880 var = make_temp_ssa_name (pointer_bounds_type_node,
2881 NULL,
2882 CHKP_BOUND_TMP_NAME);
2883 else
2884 var = chkp_get_tmp_var ();
2885 stmt = create_phi_node (var, gimple_bb (def_stmt));
2886 bounds = gimple_phi_result (stmt);
2887 *iter = gsi_for_phi (stmt);
2888
2889 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2890
2891 /* The created bounds do not have all phi args computed yet and
2892 therefore we do not know if there is a valid source
2893 of bounds for this node. Hence we mark the bounds
2894 as incomplete and recompute them when all phi
2895 args are computed. */
2896 chkp_register_incomplete_bounds (bounds, node);
2897 break;
2898
2899 case GIMPLE_ASM:
2900 bounds = chkp_get_zero_bounds ();
2901 bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
2902 break;
2903
2904 default:
2905 internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
2906 gimple_code_name[code]);
2907 }
2908
2909 return bounds;
2910 }
2911
2912 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2913 tree
2914 chkp_build_make_bounds_call (tree lower_bound, tree size)
2915 {
2916 tree call = build1 (ADDR_EXPR,
2917 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2918 chkp_bndmk_fndecl);
2919 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2920 call, 2, lower_bound, size);
2921 }
2922
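/* A sketch of what chkp_make_static_bounds below produces (exact
   names come from the CHKP_*_PREFIX macros): for a global
   `int arr[10]' it emits a static bounds var whose DECL_INITIAL is
   &arr and which is marked need_bounds_init, so that it receives the
   actual bounds of arr at startup.  */
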
2923 /* Create a static bounds var for the specified OBJ, which
2924 is either a VAR_DECL or a string constant. */
2925 static tree
2926 chkp_make_static_bounds (tree obj)
2927 {
2928 static int string_id = 1;
2929 static int var_id = 1;
2930 tree *slot;
2931 const char *var_name;
2932 char *bnd_var_name;
2933 tree bnd_var;
2934
2935 /* First check if we already have required var. */
2936 if (chkp_static_var_bounds)
2937 {
2938 /* For vars we use the assembler name as a key in the
2939 chkp_static_var_bounds map. This allows us to
2940 avoid duplicating bounds vars for decls
2941 sharing an assembler name. */
2942 if (VAR_P (obj))
2943 {
2944 tree name = DECL_ASSEMBLER_NAME (obj);
2945 slot = chkp_static_var_bounds->get (name);
2946 if (slot)
2947 return *slot;
2948 }
2949 else
2950 {
2951 slot = chkp_static_var_bounds->get (obj);
2952 if (slot)
2953 return *slot;
2954 }
2955 }
2956
2957 /* Build decl for bounds var. */
2958 if (VAR_P (obj))
2959 {
2960 if (DECL_IGNORED_P (obj))
2961 {
2962 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2963 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2964 }
2965 else
2966 {
2967 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2968
2969 /* For hidden symbols we want to skip first '*' char. */
2970 if (*var_name == '*')
2971 var_name++;
2972
2973 bnd_var_name = (char *) xmalloc (strlen (var_name)
2974 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2975 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2976 strcat (bnd_var_name, var_name);
2977 }
2978
2979 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2980 get_identifier (bnd_var_name),
2981 pointer_bounds_type_node);
2982
2983 /* Address of the obj will be used as lower bound. */
2984 TREE_ADDRESSABLE (obj) = 1;
2985 }
2986 else
2987 {
2988 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2989 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2990
2991 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2992 get_identifier (bnd_var_name),
2993 pointer_bounds_type_node);
2994 }
2995
2996 free (bnd_var_name);
2997
2998 TREE_PUBLIC (bnd_var) = 0;
2999 TREE_USED (bnd_var) = 1;
3000 TREE_READONLY (bnd_var) = 0;
3001 TREE_STATIC (bnd_var) = 1;
3002 TREE_ADDRESSABLE (bnd_var) = 0;
3003 DECL_ARTIFICIAL (bnd_var) = 1;
3004 DECL_COMMON (bnd_var) = 1;
3005 DECL_COMDAT (bnd_var) = 1;
3006 DECL_READ_P (bnd_var) = 1;
3007 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
3008 /* Force output similar to constant bounds.
3009 See chkp_make_static_const_bounds. */
3010 varpool_node::get_create (bnd_var)->force_output = 1;
3011 /* Mark symbol as requiring bounds initialization. */
3012 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
3013 varpool_node::finalize_decl (bnd_var);
3014
3015 /* Add created var to the map to use it for other references
3016 to obj. */
3017 if (!chkp_static_var_bounds)
3018 chkp_static_var_bounds = new hash_map<tree, tree>;
3019
3020 if (VAR_P (obj))
3021 {
3022 tree name = DECL_ASSEMBLER_NAME (obj);
3023 chkp_static_var_bounds->put (name, bnd_var);
3024 }
3025 else
3026 chkp_static_var_bounds->put (obj, bnd_var);
3027
3028 return bnd_var;
3029 }
3030
3031 /* When a var has an incomplete type we cannot get its size to
3032 compute bounds. In such cases we use a checker
3033 builtin call which determines the object size at runtime. */
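/* Roughly, for `extern char arr[];' the code built below is

     __size_tmp = chkp_sizeof (arr);     <-- size relocation
     size = __size_tmp != 0 ? __size_tmp : 0 - &arr;
     __bound_tmp = bndmk (&arr, size);

   where the fallback to `0 - &arr' (the maximum possible size) is
   only emitted under flag_chkp_zero_dynamic_size_as_infinite.
   (Pseudo-GIMPLE; exact builtin names differ.)  */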
3034 static tree
3035 chkp_generate_extern_var_bounds (tree var)
3036 {
3037 tree bounds, size_reloc, lb, size, max_size, cond;
3038 gimple_stmt_iterator gsi;
3039 gimple_seq seq = NULL;
3040 gimple *stmt;
3041
3042 /* If instrumentation is not enabled for vars having
3043 incomplete type then just return zero bounds to avoid
3044 checks for this var. */
3045 if (!flag_chkp_incomplete_type)
3046 return chkp_get_zero_bounds ();
3047
3048 if (dump_file && (dump_flags & TDF_DETAILS))
3049 {
3050 fprintf (dump_file, "Generating bounds for extern symbol '");
3051 print_generic_expr (dump_file, var);
3052 fprintf (dump_file, "'\n");
3053 }
3054
3055 stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);
3056
3057 size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
3058 gimple_call_set_lhs (stmt, size_reloc);
3059
3060 gimple_seq_add_stmt (&seq, stmt);
3061
3062 lb = chkp_build_addr_expr (var);
3063 size = make_ssa_name (chkp_get_size_tmp_var ());
3064
3065 if (flag_chkp_zero_dynamic_size_as_infinite)
3066 {
3067 /* We should check that size relocation was resolved.
3068 If it was not then use maximum possible size for the var. */
3069 max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
3070 fold_convert (chkp_uintptr_type, lb));
3071 max_size = chkp_force_gimple_call_op (max_size, &seq);
3072
3073 cond = build2 (NE_EXPR, boolean_type_node,
3074 size_reloc, integer_zero_node);
3075 stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
3076 gimple_seq_add_stmt (&seq, stmt);
3077 }
3078 else
3079 {
3080 stmt = gimple_build_assign (size, size_reloc);
3081 gimple_seq_add_stmt (&seq, stmt);
3082 }
3083
3084 gsi = gsi_start_bb (chkp_get_entry_block ());
3085 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3086
3087 bounds = chkp_make_bounds (lb, size, &gsi, true);
3088
3089 return bounds;
3090 }
3091
3092 /* Return 1 if TYPE has fields with zero size or fields
3093 marked with the bnd_variable_size attribute. */
3094 bool
3095 chkp_variable_size_type (tree type)
3096 {
3097 bool res = false;
3098 tree field;
3099
3100 if (RECORD_OR_UNION_TYPE_P (type))
3101 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3102 {
3103 if (TREE_CODE (field) == FIELD_DECL)
3104 res = res
3105 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3106 || chkp_variable_size_type (TREE_TYPE (field));
3107 }
3108 else
3109 res = !TYPE_SIZE (type)
3110 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3111 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3112
3113 return res;
3114 }
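
/* E.g. `struct s { int n; char data[]; };' counts as variable-sized
   here: the flexible array member has no TYPE_SIZE, so the recursive
   check above returns true for the whole record.  */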
3115
3116 /* Compute and return bounds for address of DECL which is
3117 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
3118 static tree
3119 chkp_get_bounds_for_decl_addr (tree decl)
3120 {
3121 tree bounds;
3122
3123 gcc_assert (VAR_P (decl)
3124 || TREE_CODE (decl) == PARM_DECL
3125 || TREE_CODE (decl) == RESULT_DECL);
3126
3127 bounds = chkp_get_registered_addr_bounds (decl);
3128
3129 if (bounds)
3130 return bounds;
3131
3132 if (dump_file && (dump_flags & TDF_DETAILS))
3133 {
3134 fprintf (dump_file, "Building bounds for address of decl ");
3135 print_generic_expr (dump_file, decl);
3136 fprintf (dump_file, "\n");
3137 }
3138
3139 /* Use zero bounds if the size is unknown and checks for
3140 unknown sizes are disabled. */
3141 if ((!DECL_SIZE (decl)
3142 || (chkp_variable_size_type (TREE_TYPE (decl))
3143 && (TREE_STATIC (decl)
3144 || DECL_EXTERNAL (decl)
3145 || TREE_PUBLIC (decl))))
3146 && !flag_chkp_incomplete_type)
3147 return chkp_get_zero_bounds ();
3148
3149 if (flag_chkp_use_static_bounds
3150 && VAR_P (decl)
3151 && (TREE_STATIC (decl)
3152 || DECL_EXTERNAL (decl)
3153 || TREE_PUBLIC (decl))
3154 && !DECL_THREAD_LOCAL_P (decl))
3155 {
3156 tree bnd_var = chkp_make_static_bounds (decl);
3157 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3158 gimple *stmt;
3159
3160 bounds = chkp_get_tmp_reg (NULL);
3161 stmt = gimple_build_assign (bounds, bnd_var);
3162 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3163 }
3164 else if (!DECL_SIZE (decl)
3165 || (chkp_variable_size_type (TREE_TYPE (decl))
3166 && (TREE_STATIC (decl)
3167 || DECL_EXTERNAL (decl)
3168 || TREE_PUBLIC (decl))))
3169 {
3170 gcc_assert (VAR_P (decl));
3171 bounds = chkp_generate_extern_var_bounds (decl);
3172 }
3173 else
3174 {
3175 tree lb = chkp_build_addr_expr (decl);
3176 bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
3177 }
3178
3179 return bounds;
3180 }
3181
3182 /* Compute and return bounds for constant string. */
3183 static tree
3184 chkp_get_bounds_for_string_cst (tree cst)
3185 {
3186 tree bounds;
3187 tree lb;
3188 tree size;
3189
3190 gcc_assert (TREE_CODE (cst) == STRING_CST);
3191
3192 bounds = chkp_get_registered_bounds (cst);
3193
3194 if (bounds)
3195 return bounds;
3196
3197 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3198 || flag_chkp_use_static_const_bounds > 0)
3199 {
3200 tree bnd_var = chkp_make_static_bounds (cst);
3201 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3202 gimple *stmt;
3203
3204 bounds = chkp_get_tmp_reg (NULL);
3205 stmt = gimple_build_assign (bounds, bnd_var);
3206 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3207 }
3208 else
3209 {
3210 lb = chkp_build_addr_expr (cst);
3211 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3212 bounds = chkp_make_bounds (lb, size, NULL, false);
3213 }
3214
3215 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3216
3217 return bounds;
3218 }
3219
3220 /* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
3221 return the result. If ITER is not NULL then code is inserted
3222 before the position pointed to by ITER. Otherwise code is added to
3223 the entry block. */
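/* Conceptually the intersection builtin used below yields
   [MAX (lb1, lb2), MIN (ub1, ub2)], i.e. the result never allows an
   access that either input would reject; this is what makes the
   bounds narrowing done by the callers safe.  */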
3224 static tree
3225 chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
3226 {
3227 if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
3228 return bounds2 ? bounds2 : bounds1;
3229 else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
3230 return bounds1;
3231 else
3232 {
3233 gimple_seq seq;
3234 gimple *stmt;
3235 tree bounds;
3236
3237 seq = NULL;
3238
3239 stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
3240 chkp_mark_stmt (stmt);
3241
3242 bounds = chkp_get_tmp_reg (stmt);
3243 gimple_call_set_lhs (stmt, bounds);
3244
3245 gimple_seq_add_stmt (&seq, stmt);
3246
3247 /* We are probably doing narrowing for a constant expression.
3248 In such a case ITER may be undefined. */
3249 if (!iter)
3250 {
3251 gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
3252 iter = &gsi;
3253 gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
3254 }
3255 else
3256 gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);
3257
3258 if (dump_file && (dump_flags & TDF_DETAILS))
3259 {
3260 fprintf (dump_file, "Bounds intersection: ");
3261 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
3262 fprintf (dump_file, " inserted before statement: ");
3263 print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
3264 TDF_VOPS|TDF_MEMSYMS);
3265 }
3266
3267 return bounds;
3268 }
3269 }
3270
3271 /* Return 1 if we are allowed to narrow bounds for the addressed FIELD
3272 and 0 otherwise. REF is a reference to the field. */
3273
3274 static bool
3275 chkp_may_narrow_to_field (tree ref, tree field)
3276 {
3277 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3278 && tree_to_uhwi (DECL_SIZE (field)) != 0
3279 && !(flag_chkp_flexible_struct_trailing_arrays
3280 && array_at_struct_end_p (ref))
3281 && (!DECL_FIELD_OFFSET (field)
3282 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3283 && (!DECL_FIELD_BIT_OFFSET (field)
3284 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3285 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3286 && !chkp_variable_size_type (TREE_TYPE (field));
3287 }
3288
3289 /* Return 1 if bounds for FIELD should be narrowed to the
3290 field's own size. REF is a reference to the field. */
3291
3292 static bool
3293 chkp_narrow_bounds_for_field (tree ref, tree field)
3294 {
3295 HOST_WIDE_INT offs;
3296 HOST_WIDE_INT bit_offs;
3297
3298 if (!chkp_may_narrow_to_field (ref, field))
3299 return false;
3300
3301 /* Access to compiler generated fields should not cause
3302 bounds narrowing. */
3303 if (DECL_ARTIFICIAL (field))
3304 return false;
3305
3306 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3307 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3308
3309 return (flag_chkp_narrow_bounds
3310 && (flag_chkp_first_field_has_own_bounds
3311 || offs
3312 || bit_offs));
3313 }
3314
3315 /* Perform narrowing for BOUNDS of an INNER reference. Shift the lower
3316 bound by OFFSET bytes and limit the result to SIZE bytes. Newly created
3317 statements are inserted at ITER. */
3318
3319 static tree
3320 chkp_narrow_size_and_offset (tree bounds, tree inner, tree offset,
3321 tree size, gimple_stmt_iterator *iter)
3322 {
3323 tree addr = chkp_build_addr_expr (unshare_expr (inner));
3324 tree t = TREE_TYPE (addr);
3325
3326 gimple *stmt = gimple_build_assign (NULL_TREE, addr);
3327 addr = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3328 gimple_assign_set_lhs (stmt, addr);
3329 gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3330
3331 stmt = gimple_build_assign (NULL_TREE, POINTER_PLUS_EXPR, addr, offset);
3332 tree shifted = make_temp_ssa_name (t, stmt, CHKP_BOUND_TMP_NAME);
3333 gimple_assign_set_lhs (stmt, shifted);
3334 gsi_insert_seq_before (iter, stmt, GSI_SAME_STMT);
3335
3336 tree bounds2 = chkp_make_bounds (shifted, size, iter, false);
3337
3338 return chkp_intersect_bounds (bounds, bounds2, iter);
3339 }
3340
3341 /* Perform narrowing for BOUNDS using bounds computed for field
3342 access COMPONENT. ITER meaning is the same as for
3343 chkp_intersect_bounds. */
3344
3345 static tree
3346 chkp_narrow_bounds_to_field (tree bounds, tree component,
3347 gimple_stmt_iterator *iter)
3348 {
3349 tree field = TREE_OPERAND (component, 1);
3350 tree size = DECL_SIZE_UNIT (field);
3351 tree field_ptr = chkp_build_addr_expr (component);
3352 tree field_bounds;
3353
3354 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3355
3356 return chkp_intersect_bounds (field_bounds, bounds, iter);
3357 }
3358
3359 /* Parse field or array access NODE.
3360
3361 The PTR output parameter holds a pointer to the outermost
3362 object.
3363
3364 The BITFIELD output parameter is set to 1 if a bitfield is
3365 accessed and to 0 otherwise. If it is 1 then ELT holds the
3366 outer component of the accessed bit field.
3367
3368 The SAFE output parameter is set to 1 if the access is safe and
3369 checks are not required.
3370
3371 The BOUNDS output parameter holds bounds to be used to check
3372 the access (may be NULL).
3373
3374 If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
3375 innermost accessed component. */
3376 static void
3377 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3378 tree *elt, bool *safe,
3379 bool *bitfield,
3380 tree *bounds,
3381 gimple_stmt_iterator *iter,
3382 bool innermost_bounds)
3383 {
3384 tree comp_to_narrow = NULL_TREE;
3385 tree last_comp = NULL_TREE;
3386 bool array_ref_found = false;
3387 tree *nodes;
3388 tree var;
3389 int len;
3390 int i;
3391
3392 /* Compute tree height for expression. */
3393 var = node;
3394 len = 1;
3395 while (TREE_CODE (var) == COMPONENT_REF
3396 || TREE_CODE (var) == ARRAY_REF
3397 || TREE_CODE (var) == VIEW_CONVERT_EXPR
3398 || TREE_CODE (var) == BIT_FIELD_REF)
3399 {
3400 var = TREE_OPERAND (var, 0);
3401 len++;
3402 }
3403
3404 gcc_assert (len > 1);
3405
3406 /* It is more convenient for us to scan left-to-right,
3407 so walk the tree again and put all nodes into the nodes
3408 vector in reversed order. */
3409 nodes = XALLOCAVEC (tree, len);
3410 nodes[len - 1] = node;
3411 for (i = len - 2; i >= 0; i--)
3412 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3413
3414 if (bounds)
3415 *bounds = NULL;
3416 *safe = true;
3417 *bitfield = ((TREE_CODE (node) == COMPONENT_REF
3418 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)))
3419 || TREE_CODE (node) == BIT_FIELD_REF);
3420 /* To get bitfield address we will need outer element. */
3421 if (*bitfield)
3422 *elt = nodes[len - 2];
3423 else
3424 *elt = NULL_TREE;
3425
3426 /* If we have indirection in expression then compute
3427 outermost structure bounds. Computed bounds may be
3428 narrowed later. */
3429 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3430 {
3431 *safe = false;
3432 *ptr = TREE_OPERAND (nodes[0], 0);
3433 if (bounds)
3434 *bounds = chkp_find_bounds (*ptr, iter);
3435 }
3436 else
3437 {
3438 gcc_assert (VAR_P (var)
3439 || TREE_CODE (var) == PARM_DECL
3440 || TREE_CODE (var) == RESULT_DECL
3441 || TREE_CODE (var) == STRING_CST
3442 || TREE_CODE (var) == SSA_NAME);
3443
3444 *ptr = chkp_build_addr_expr (var);
3445 }
3446
3447 /* In this loop we are trying to find a field access
3448 requiring narrowing. There are two simple rules
3449 for search:
3450 1. Leftmost array_ref is chosen if any.
3451 2. Rightmost suitable component_ref is chosen if innermost
3452 bounds are required and no array_ref exists. */
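     /* For example (illustrative), in s.a.b[i].c rule 1 selects the
	leftmost array_ref b[i]; bounds are then narrowed to the
	component `b' containing it rather than to the innermost
	field `c'.  */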
3453 for (i = 1; i < len; i++)
3454 {
3455 var = nodes[i];
3456
3457 if (TREE_CODE (var) == ARRAY_REF)
3458 {
3459 *safe = false;
3460 array_ref_found = true;
3461 if (flag_chkp_narrow_bounds
3462 && !flag_chkp_narrow_to_innermost_arrray
3463 && (!last_comp
3464 || chkp_may_narrow_to_field (var,
3465 TREE_OPERAND (last_comp, 1))))
3466 {
3467 comp_to_narrow = last_comp;
3468 break;
3469 }
3470 }
3471 else if (TREE_CODE (var) == COMPONENT_REF)
3472 {
3473 tree field = TREE_OPERAND (var, 1);
3474
3475 if (innermost_bounds
3476 && !array_ref_found
3477 && chkp_narrow_bounds_for_field (var, field))
3478 comp_to_narrow = var;
3479 last_comp = var;
3480
3481 if (flag_chkp_narrow_bounds
3482 && flag_chkp_narrow_to_innermost_arrray
3483 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3484 {
3485 if (bounds)
3486 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3487 comp_to_narrow = NULL;
3488 }
3489 }
3490 else if (TREE_CODE (var) == BIT_FIELD_REF)
3491 {
3492 if (flag_chkp_narrow_bounds && bounds)
3493 {
3494 tree offset, size;
3495 chkp_parse_bit_field_ref (var, UNKNOWN_LOCATION, &offset, &size);
3496 *bounds
3497 = chkp_narrow_size_and_offset (*bounds, TREE_OPERAND (var, 0),
3498 offset, size, iter);
3499 }
3500 }
3501 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3502 /* Nothing to do for it. */
3503 ;
3504 else
3505 gcc_unreachable ();
3506 }
3507
3508 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3509 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3510
3511 if (innermost_bounds && bounds && !*bounds)
3512 *bounds = chkp_find_bounds (*ptr, iter);
3513 }
3514
3515 /* Parse BIT_FIELD_REF NODE at a given location LOC. Return OFFSET
3516 and SIZE in bytes. */
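/* Worked example: for a bit field at bit offset 17 with bit size 7
   and BITS_PER_UNIT == 8, OFFSET is 17 / 8 = 2 bytes, the remainder
   is 1 bit, and SIZE is CEIL ((7 + 1) / 8) = 1 byte, i.e. byte 2
   covers the whole bit field.  */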
3517
3518 static
3519 void chkp_parse_bit_field_ref (tree node, location_t loc, tree *offset,
3520 tree *size)
3521 {
3522 tree bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3523 tree offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3524 tree rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3525 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3526
3527 tree s = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3528 s = size_binop_loc (loc, PLUS_EXPR, s, rem);
3529 s = size_binop_loc (loc, CEIL_DIV_EXPR, s, bpu);
3530 s = fold_convert (size_type_node, s);
3531
3532 *offset = offs;
3533 *size = s;
3534 }
3535
3536 /* Compute and return bounds for address of OBJ. */
3537 static tree
3538 chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
3539 {
3540 tree bounds = chkp_get_registered_addr_bounds (obj);
3541
3542 if (bounds)
3543 return bounds;
3544
3545 switch (TREE_CODE (obj))
3546 {
3547 case VAR_DECL:
3548 case PARM_DECL:
3549 case RESULT_DECL:
3550 bounds = chkp_get_bounds_for_decl_addr (obj);
3551 break;
3552
3553 case STRING_CST:
3554 bounds = chkp_get_bounds_for_string_cst (obj);
3555 break;
3556
3557 case ARRAY_REF:
3558 case COMPONENT_REF:
3559 case BIT_FIELD_REF:
3560 {
3561 tree elt;
3562 tree ptr;
3563 bool safe;
3564 bool bitfield;
3565
3566 chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
3567 &bitfield, &bounds, iter, true);
3568
3569 gcc_assert (bounds);
3570 }
3571 break;
3572
3573 case FUNCTION_DECL:
3574 case LABEL_DECL:
3575 bounds = chkp_get_zero_bounds ();
3576 break;
3577
3578 case MEM_REF:
3579 bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
3580 break;
3581
3582 case REALPART_EXPR:
3583 case IMAGPART_EXPR:
3584 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
3585 break;
3586
3587 default:
3588 if (dump_file && (dump_flags & TDF_DETAILS))
3589 {
3590 fprintf (dump_file, "chkp_make_addressed_object_bounds: "
3591 "unexpected object of type %s\n",
3592 get_tree_code_name (TREE_CODE (obj)));
3593 print_node (dump_file, "", obj, 0);
3594 }
3595 internal_error ("chkp_make_addressed_object_bounds: "
3596 "Unexpected tree code %s",
3597 get_tree_code_name (TREE_CODE (obj)));
3598 }
3599
3600 chkp_register_addr_bounds (obj, bounds);
3601
3602 return bounds;
3603 }
3604
3605 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3606 to compute bounds if required. Computed bounds should be available at
3607 the position pointed to by ITER.
3608
3609 If PTR_SRC is NULL_TREE then the pointer definition is identified.
3610
3611 If PTR_SRC is not NULL_TREE then ITER points to the statement which loads
3612 PTR. If PTR is any memory reference then ITER points to a statement
3613 after which a bndldx will be inserted. In both cases ITER will be updated
3614 to point to the inserted bndldx statement. */
3615
3616 static tree
3617 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3618 {
3619 tree addr = NULL_TREE;
3620 tree bounds = NULL_TREE;
3621
3622 if (!ptr_src)
3623 ptr_src = ptr;
3624
3625 bounds = chkp_get_registered_bounds (ptr_src);
3626
3627 if (bounds)
3628 return bounds;
3629
3630 switch (TREE_CODE (ptr_src))
3631 {
3632 case MEM_REF:
3633 case VAR_DECL:
3634 if (BOUNDED_P (ptr_src))
3635 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3636 bounds = chkp_get_zero_bounds ();
3637 else
3638 {
3639 addr = chkp_build_addr_expr (ptr_src);
3640 bounds = chkp_build_bndldx (addr, ptr, iter);
3641 }
3642 else
3643 bounds = chkp_get_nonpointer_load_bounds ();
3644 break;
3645
3646 case ARRAY_REF:
3647 case COMPONENT_REF:
3648 addr = get_base_address (ptr_src);
3649 if (DECL_P (addr)
3650 || TREE_CODE (addr) == MEM_REF
3651 || TREE_CODE (addr) == TARGET_MEM_REF)
3652 {
3653 if (BOUNDED_P (ptr_src))
3654 if (VAR_P (ptr) && DECL_REGISTER (ptr))
3655 bounds = chkp_get_zero_bounds ();
3656 else
3657 {
3658 addr = chkp_build_addr_expr (ptr_src);
3659 bounds = chkp_build_bndldx (addr, ptr, iter);
3660 }
3661 else
3662 bounds = chkp_get_nonpointer_load_bounds ();
3663 }
3664 else
3665 {
3666 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3667 bounds = chkp_find_bounds (addr, iter);
3668 }
3669 break;
3670
3671 case PARM_DECL:
3672 /* Handled above but failed. */
3673 bounds = chkp_get_invalid_op_bounds ();
3674 break;
3675
3676 case TARGET_MEM_REF:
3677 addr = chkp_build_addr_expr (ptr_src);
3678 bounds = chkp_build_bndldx (addr, ptr, iter);
3679 break;
3680
3681 case SSA_NAME:
3682 bounds = chkp_get_registered_bounds (ptr_src);
3683 if (!bounds)
3684 {
3685 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3686 gphi_iterator phi_iter;
3687
3688 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3689
3690 gcc_assert (bounds);
3691
3692 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3693 {
3694 unsigned i;
3695
3696 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3697 {
3698 tree arg = gimple_phi_arg_def (def_phi, i);
3699 tree arg_bnd;
3700 gphi *phi_bnd;
3701
3702 arg_bnd = chkp_find_bounds (arg, NULL);
3703
3704 /* chkp_get_bounds_by_definition created new phi
3705 statement and phi_iter points to it.
3706
3707 Previous call to chkp_find_bounds could create
3708 new basic block and therefore change phi statement
3709 phi_iter points to. */
3710 phi_bnd = phi_iter.phi ();
3711
3712 add_phi_arg (phi_bnd, arg_bnd,
3713 gimple_phi_arg_edge (def_phi, i),
3714 UNKNOWN_LOCATION);
3715 }
3716
3717 /* If all bound phi nodes have their arg computed
3718 then we may finish its computation. See
3719 chkp_finish_incomplete_bounds for more details. */
3720 if (chkp_may_finish_incomplete_bounds ())
3721 chkp_finish_incomplete_bounds ();
3722 }
3723
3724 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3725 || chkp_incomplete_bounds (bounds));
3726 }
3727 break;
3728
3729 case ADDR_EXPR:
3730 case WITH_SIZE_EXPR:
3731 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3732 break;
3733
3734 case INTEGER_CST:
3735 case COMPLEX_CST:
3736 case VECTOR_CST:
3737 if (integer_zerop (ptr_src))
3738 bounds = chkp_get_none_bounds ();
3739 else
3740 bounds = chkp_get_invalid_op_bounds ();
3741 break;
3742
3743 default:
3744 if (dump_file && (dump_flags & TDF_DETAILS))
3745 {
3746 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3747 get_tree_code_name (TREE_CODE (ptr_src)));
3748 print_node (dump_file, "", ptr_src, 0);
3749 }
3750 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3751 get_tree_code_name (TREE_CODE (ptr_src)));
3752 }
3753
3754 if (!bounds)
3755 {
3756 if (dump_file && (dump_flags & TDF_DETAILS))
3757 {
3758 fprintf (dump_file, "chkp_find_bounds: cannot find bounds for pointer\n");
          print_node (dump_file, "", ptr_src, 0);
        }
      internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
    }

  return bounds;
}

/* Normal case for bounds search without forced narrowing. */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}

/* Search bounds for pointer PTR loaded from PTR_SRC
   by the statement *ITER points to. */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}

/* Helper function which checks the type of RHS and finds all pointers
   in it.  For each found pointer we build its accesses in the LHS and
   RHS objects and then call HANDLER for them.  The function is used to
   copy or initialize bounds for a copied object. */
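/* For example (an illustrative sketch, not taken from real dump
   output), for

     struct S { int *p; struct { char *q; } in; } a, b;
     a = b;

   the walk invokes HANDLER for the pairs (a.p, b.p) and
   (a.in.q, b.in.q). */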
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
                               assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers. */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT cnt;
          tree val;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
            {
              if (field && chkp_type_has_pointer (TREE_TYPE (field)))
                {
                  tree lhs_field = chkp_build_component_ref (lhs, field);
                  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
                }
            }
        }
      else
        for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
          if (TREE_CODE (field) == FIELD_DECL
              && chkp_type_has_pointer (TREE_TYPE (field)))
            {
              tree rhs_field = chkp_build_component_ref (rhs, field);
              tree lhs_field = chkp_build_component_ref (lhs, field);
              chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
            }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT cnt;
          tree purp, val, lhs_elem;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
            {
              if (purp && TREE_CODE (purp) == RANGE_EXPR)
                {
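                  /* A sketch: an initializer like "[0 ... 3] = &x"
                     walks elements 0 through 3 with the same VAL. */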
                  tree lo_index = TREE_OPERAND (purp, 0);
                  tree hi_index = TREE_OPERAND (purp, 1);

                  for (cur = (unsigned) tree_to_uhwi (lo_index);
                       cur <= (unsigned) tree_to_uhwi (hi_index);
                       cur++)
                    {
                      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
                      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
                    }
                }
              else
                {
                  if (purp)
                    {
                      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
                      cur = tree_to_uhwi (purp);
                    }

                  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

                  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
                }
            }
        }
      /* Copy array only when size is known. */
      else if (maxval && !integer_minus_onep (maxval))
        for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
          {
            tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
            tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
            chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
          }
    }
  else
    internal_error ("chkp_walk_pointer_assignments: unexpected RHS type: %s",
                    get_tree_code_name (TREE_CODE (type)));
}

/* Add code to copy bounds for assignment of RHS to LHS.
   ARG is an iterator pointing to the new code position. */
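/* For an element copy like "dst.p = src.p" this emits (a sketch;
   temporary names are illustrative):

     __bound_tmp.N = <bounds looked up for src.p>;
     __builtin___chkp_bndstx (&dst.p, src.p, __bound_tmp.N);  */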
static void
chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
{
  gimple_stmt_iterator *iter = (gimple_stmt_iterator *) arg;
  tree bounds = chkp_find_bounds (rhs, iter);
  tree addr = chkp_build_addr_expr (lhs);

  chkp_build_bndstx (addr, rhs, bounds, iter);
}

/* Emit static bound initializers and size vars. */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers. */
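  /* E.g. (a sketch) a statically initialized "int *p = &x;" results
     in a 'P' constructor that stores the bounds of x into the bounds
     table entry for &p. */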
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that the variable is actually emitted and that we both
       need and may initialize its bounds. */
    if (node->need_bounds_init
        && !POINTER_BOUNDS_P (node->decl)
        && DECL_RTL (node->decl)
        && MEM_P (DECL_RTL (node->decl))
        && TREE_ASM_WRITTEN (node->decl))
      {
        chkp_walk_pointer_assignments (node->decl,
                                       DECL_INITIAL (node->decl),
                                       &stmts,
                                       chkp_add_modification_to_stmt_list);

        if (stmts.avail <= 0)
          {
            cgraph_build_static_cdtor ('P', stmts.stmts,
                                       MAX_RESERVED_INIT_PRIORITY + 3);
            stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
            stmts.stmts = NULL;
          }
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
                               MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars. */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
        && POINTER_BOUNDS_P (node->decl)
        && TREE_ASM_WRITTEN (node->decl))
      {
        tree bnd = node->decl;
        tree var;

        gcc_assert (DECL_INITIAL (bnd)
                    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

        var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
        chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
                               MAX_RESERVED_INIT_PRIORITY + 2);

  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}

/* An instrumentation function which is called for each statement
   having a memory access we want to instrument.  It inserts check
   code and bounds copy code.

   ITER points to the statement to instrument.

   NODE holds the memory access in the statement to check.

   LOC holds the location information for the statement.

   DIRFLAG determines whether the access is a read or a write.

   ACCESS_OFFS should be added to the address used in NODE
   before the check.

   ACCESS_SIZE holds the size of the checked access.

   SAFE indicates if the NODE access is safe and should not be
   checked. */
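/* For instance (an illustrative sketch), for the BIT_FIELD_REF case
   below, a read of bits 16..31 of *p_1 recurses on the underlying
   object with ACCESS_OFFS = 2 and ACCESS_SIZE = 2, so the checked
   byte range becomes [p_1 + 2, p_1 + 3]. */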
static void
chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
                   location_t loc, tree dirflag,
                   tree access_offs, tree access_size,
                   bool safe)
{
  tree node_type = TREE_TYPE (node);
  tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
  tree addr_first = NULL_TREE; /* address of the first accessed byte */
  tree addr_last = NULL_TREE; /* address of the last accessed byte */
  tree ptr = NULL_TREE; /* a pointer used for dereference */
  tree bounds = NULL_TREE;

  /* We do not need instrumentation for clobbers. */
  if (dirflag == integer_one_node
      && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
      && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
    return;

  switch (TREE_CODE (node))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        bool bitfield;
        tree elt;

        if (safe)
          {
            /* We are not going to generate any checks, so do not
               generate bounds as well. */
            addr_first = chkp_build_addr_expr (node);
            break;
          }

        chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
                                            &bitfield, &bounds, iter, false);

        /* Break if there is no dereference and operation is safe. */

        if (bitfield)
          {
            tree field = TREE_OPERAND (node, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = chkp_build_addr_expr (elt);
            addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
            addr_first = fold_build_pointer_plus_loc (loc,
                                                      addr_first,
                                                      byte_position (field));
          }
        else
          addr_first = chkp_build_addr_expr (node);
      }
      break;

    case INDIRECT_REF:
      ptr = TREE_OPERAND (node, 0);
      addr_first = ptr;
      break;

    case MEM_REF:
      ptr = TREE_OPERAND (node, 0);
      addr_first = chkp_build_addr_expr (node);
      break;

    case TARGET_MEM_REF:
      ptr = TMR_BASE (node);
      addr_first = chkp_build_addr_expr (node);
      break;

    case ARRAY_RANGE_REF:
      printf ("ARRAY_RANGE_REF\n");
      debug_gimple_stmt (gsi_stmt (*iter));
      debug_tree (node);
      gcc_unreachable ();
      break;

    case BIT_FIELD_REF:
      {
        tree offset, size;

        gcc_assert (!access_offs);
        gcc_assert (!access_size);

        chkp_parse_bit_field_ref (node, loc, &offset, &size);

        chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
                           dirflag, offset, size, safe);
        return;
      }
      break;

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
      if (dirflag != integer_one_node
          || DECL_REGISTER (node))
        return;

      safe = true;
      addr_first = chkp_build_addr_expr (node);
      break;

    default:
      return;
    }

  /* If addr_last was not computed then use (addr_first + size - 1)
     expression to compute it. */
  if (!addr_last)
    {
      addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
      addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
    }

  /* Shift both addr_first and addr_last by access_offs if specified. */
  if (access_offs)
    {
      addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
      addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
    }

  /* Generate bndcl/bndcu checks if memory access is not safe. */
  if (!safe)
    {
      gimple_stmt_iterator stmt_iter = *iter;

      if (!bounds)
        bounds = chkp_find_bounds (ptr, iter);

      chkp_check_mem_access (addr_first, addr_last, bounds,
                             stmt_iter, loc, dirflag);
    }

  /* We need to store bounds in case a pointer is stored. */
  if (dirflag == integer_one_node
      && chkp_type_has_pointer (node_type)
      && flag_chkp_store_bounds)
    {
      gimple *stmt = gsi_stmt (*iter);
      tree rhs1 = gimple_assign_rhs1 (stmt);
      enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

      if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
        chkp_walk_pointer_assignments (node, rhs1, iter,
                                       chkp_copy_bounds_for_elem);
      else
        {
          bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
          chkp_build_bndstx (addr_first, rhs1, bounds, iter);
        }
    }
}

/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inlining of EDGE. */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx. */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
        {
          tree fndecl = gimple_call_fndecl (stmt);
          struct cgraph_node *callee = cgraph_node::get_create (fndecl);
          struct cgraph_edge *new_edge;

          gcc_assert (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDSTX)
                      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDLDX)
                      || chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET));

          new_edge = edge->caller->create_edge (callee,
                                                as_a <gcall *> (stmt),
                                                edge->count,
                                                edge->frequency);
          new_edge->frequency = compute_call_stmt_bb_frequency
            (edge->caller->decl, gimple_bb (stmt));
        }
      gsi_prev (&iter);
    }
}

/* Some code transformations made during the instrumentation pass
   may put the code into an inconsistent state.  Here we find and
   fix such flaws. */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We may have inserted some code right after a stmt which ends its
     bb.  We wanted to put this code on the fallthru edge, but we did
     not add new edges from the beginning because doing so may cause
     creation of new phi nodes, which could be incorrect due to
     incomplete bound phi nodes. */
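  /* A sketch: if instrumentation appended a bndstx right after a
     bb-ending call

       foo ();                           <-- ends bb
       __builtin___chkp_bndstx (...);

     the bndstx is moved onto the fallthru edge below; an abnormal
     fallthru edge is temporarily made regular for the split and then
     re-created. */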
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
        gimple *stmt = gsi_stmt (i);
        gimple_stmt_iterator next = i;

        gsi_next (&next);

        if (stmt_ends_bb_p (stmt)
            && !gsi_end_p (next))
          {
            edge fall = find_fallthru_edge (bb->succs);
            basic_block dest = NULL;
            int flags = 0;

            gcc_assert (fall);

            /* We cannot split an abnormal edge, so we store its
               parameters, make it regular, and then rebuild the
               abnormal edge after the split. */
            if (fall->flags & EDGE_ABNORMAL)
              {
                flags = fall->flags & ~EDGE_FALLTHRU;
                dest = fall->dest;

                fall->flags &= ~EDGE_COMPLEX;
              }

            while (!gsi_end_p (next))
              {
                gimple *next_stmt = gsi_stmt (next);
                gsi_remove (&next, false);
                gsi_insert_on_edge (fall, next_stmt);
              }

            gsi_commit_edge_inserts ();

            /* Re-create abnormal edge. */
            if (dest)
              make_edge (bb, dest, flags);
          }
      }
}

/* Walker callback for chkp_replace_function_pointers.  Replaces a
   function pointer in the specified operand with a pointer to the
   instrumented function version. */
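/* E.g. (a sketch; the ".chkp" suffix is how instrumented clones are
   named) an operand "&foo" is replaced with "&foo.chkp". */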
static tree
chkp_replace_function_pointer (tree *op, int *walk_subtrees,
                               void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*op) == FUNCTION_DECL
      && chkp_instrumentable_p (*op)
      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
          /* For builtins we replace pointers only for the selected
             builtins and for builtins having a body. */
          || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
              && (chkp_instrument_normal_builtin (*op)
                  || gimple_has_body_p (*op)))))
    {
      struct cgraph_node *node = cgraph_node::get_create (*op);
      struct cgraph_node *clone = NULL;

      if (!node->instrumentation_clone)
        clone = chkp_maybe_create_clone (*op);

      if (clone)
        *op = clone->decl;
      *walk_subtrees = 0;
    }

  return NULL;
}

/* This function searches for function pointers in the statement
   pointed to by GSI and replaces them with pointers to instrumented
   function versions. */
static void
chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* For calls we want to walk call args only. */
  if (gimple_code (stmt) == GIMPLE_CALL)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); i++)
        walk_tree (gimple_call_arg_ptr (stmt, i),
                   chkp_replace_function_pointer, NULL, NULL);
    }
  else
    walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
}

/* This function instruments all statements that work with memory,
   as well as calls and returns.

   It also removes excess statements from static initializers. */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple *s = gsi_stmt (i);

          /* Skip statements marked as not to be instrumented. */
          if (chkp_marked_stmt_p (s))
            {
              gsi_next (&i);
              continue;
            }

          chkp_replace_function_pointers (&i);

          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              chkp_process_stmt (&i, gimple_assign_lhs (s),
                                 gimple_location (s), integer_one_node,
                                 NULL_TREE, NULL_TREE, safe);
              chkp_process_stmt (&i, gimple_assign_rhs1 (s),
                                 gimple_location (s), integer_zero_node,
                                 NULL_TREE, NULL_TREE, safe);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                chkp_process_stmt (&i, gimple_assign_rhs2 (s),
                                   gimple_location (s), integer_zero_node,
                                   NULL_TREE, NULL_TREE, safe);
              break;

            case GIMPLE_RETURN:
              {
                greturn *r = as_a <greturn *> (s);
                if (gimple_return_retval (r) != NULL_TREE)
                  {
                    chkp_process_stmt (&i, gimple_return_retval (r),
                                       gimple_location (r),
                                       integer_zero_node,
                                       NULL_TREE, NULL_TREE, safe);

                    /* Additionally we need to add bounds
                       to the return statement. */
                    chkp_add_bounds_to_ret_stmt (&i);
                  }
              }
              break;

            case GIMPLE_CALL:
              chkp_add_bounds_to_call_stmt (&i);
              break;

            default:
              ;
            }

          gsi_next (&i);

          /* We do not need any actual pointer stores in the checker
             static initializer. */
          if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
              && gimple_code (s) == GIMPLE_ASSIGN
              && gimple_store_p (s))
            {
              gimple_stmt_iterator del_iter = gsi_for_stmt (s);
              gsi_remove (&del_iter, true);
              unlink_stmt_vdef (s);
              release_defs (s);
            }
        }
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address-taken.  In this
     case we should store the incoming bounds into the bounds table. */
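  /* E.g. (a sketch; SSA names are illustrative) for "void f (int *p)"
     with P address-taken, the entry block gets

       __builtin___chkp_bndstx (&p, p_1(D), __bound_tmp.N);

     where __bound_tmp.N holds the incoming bounds of P. */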
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
        {
          if (BOUNDED_P (arg))
            {
              tree bounds = chkp_get_next_bounds_parm (arg);
              tree def_ptr = ssa_default_def (cfun, arg);
              gimple_stmt_iterator iter
                = gsi_start_bb (chkp_get_entry_block ());
              chkp_build_bndstx (chkp_build_addr_expr (arg),
                                 def_ptr ? def_ptr : arg,
                                 bounds, &iter);

              /* Skip bounds arg. */
              arg = TREE_CHAIN (arg);
            }
          else if (chkp_type_has_pointer (TREE_TYPE (arg)))
            {
              tree orig_arg = arg;
              bitmap slots = BITMAP_ALLOC (NULL);
              gimple_stmt_iterator iter
                = gsi_start_bb (chkp_get_entry_block ());
              bitmap_iterator bi;
              unsigned bnd_no;

              chkp_find_bound_slots (TREE_TYPE (arg), slots);

              EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
                {
                  tree bounds = chkp_get_next_bounds_parm (arg);
                  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
                  tree addr = chkp_build_addr_expr (orig_arg);
                  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
                                     build_int_cst (ptr_type_node, offs));
                  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
                                     bounds, &iter);

                  arg = DECL_CHAIN (arg);
                }
              BITMAP_FREE (slots);
            }
        }
}

/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  This should allow better code
   optimization. */
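/* E.g. (a sketch) "p_2 = __builtin___chkp_init_ptr_bounds (p_1);" is
   turned into the plain assignment "p_2 = p_1;". */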

static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          tree fndecl;
          enum built_in_function fcode;

          /* Find builtins returning their first arg and replace
             them with assignments. */
          if (gimple_code (stmt) == GIMPLE_CALL
              && (fndecl = gimple_call_fndecl (stmt))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && (fcode = DECL_FUNCTION_CODE (fndecl))
              && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
                  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
            {
              tree res = gimple_call_arg (stmt, 0);
              update_call_from_tree (&gsi, res);
              stmt = gsi_stmt (gsi);
              update_stmt (stmt);
            }
        }
    }
}

/* Initialize pass. */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to a comdat section and result in a single
     copy of each one in the final binary. */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}

/* Finalize instrumentation pass. */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}

/* Main instrumentation pass function. */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}

/* Instrumentation pass gate. */
static bool
chkp_gate (void)
{
  cgraph_node *node = cgraph_node::get (cfun->decl);
  return ((node != NULL
           && node->instrumentation_clone)
          || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
}

namespace {

const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace

gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}

#include "gt-tree-chkp.h"