tree-core.h (built_in_class): Add builtin codes to be used by Pointer Bounds Checker...
[gcc.git] / gcc / tree-chkp.c
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree-core.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "tree.h"
28 #include "target.h"
29 #include "tree-iterator.h"
30 #include "tree-cfg.h"
31 #include "langhooks.h"
32 #include "tree-pass.h"
33 #include "diagnostic.h"
34 #include "ggc.h"
35 #include "is-a.h"
36 #include "cfgloop.h"
37 #include "stringpool.h"
38 #include "tree-ssa-alias.h"
39 #include "tree-ssanames.h"
40 #include "tree-ssa-operands.h"
41 #include "tree-ssa-address.h"
42 #include "tree-ssa.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "basic-block.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "gimple-expr.h"
49 #include "gimple.h"
50 #include "tree-phinodes.h"
51 #include "gimple-ssa.h"
52 #include "ssa-iterators.h"
53 #include "gimple-pretty-print.h"
54 #include "gimple-iterator.h"
55 #include "gimplify.h"
56 #include "gimplify-me.h"
57 #include "print-tree.h"
58 #include "expr.h"
59 #include "tree-ssa-propagate.h"
60 #include "gimple-fold.h"
61 #include "tree-chkp.h"
62 #include "gimple-walk.h"
63 #include "rtl.h" /* For MEM_P, assign_temp. */
64 #include "tree-dfa.h"
65 #include "ipa-ref.h"
66 #include "lto-streamer.h"
67 #include "cgraph.h"
68 #include "ipa-chkp.h"
69 #include "params.h"
70 #include "ipa-chkp.h"
71 #include "params.h"
72
73 /* Pointer Bounds Checker instruments code with memory checks to find
74 out-of-bounds memory accesses. Checks are performed by computing
75 bounds for each pointer and then comparing address of accessed
76 memory before pointer dereferencing.
77
78 1. Function clones.
79
80 See ipa-chkp.c.
81
82 2. Instrumentation.
83
84 There are few things to instrument:
85
86 a) Memory accesses - add checker calls to check address of accessed memory
87 against bounds of dereferenced pointer. Obviously safe memory
88 accesses like static variable access does not have to be instrumented
89 with checks.
90
91 Example:
92
93 val_2 = *p_1;
94
95 with 4 bytes access is transformed into:
96
97 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
98 D.1_4 = p_1 + 3;
99 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
100 val_2 = *p_1;
101
102 where __bound_tmp.1_3 are bounds computed for pointer p_1,
103 __builtin___chkp_bndcl is a lower bound check and
104 __builtin___chkp_bndcu is an upper bound check.
105
106 b) Pointer stores.
107
108 When pointer is stored in memory we need to store its bounds. To
109 achieve compatibility of instrumented code with regular codes
110 we have to keep data layout and store bounds in special bound tables
111 via special checker call. Implementation of bounds table may vary for
112 different platforms. It has to associate pointer value and its
113 location (it is required because we may have two equal pointers
114 with different bounds stored in different places) with bounds.
115 Another checker builtin allows to get bounds for specified pointer
116 loaded from specified location.
117
118 Example:
119
120 buf1[i_1] = &buf2;
121
122 is transformed into:
123
124 buf1[i_1] = &buf2;
125 D.1_2 = &buf1[i_1];
126 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
127
128 where __bound_tmp.1_2 are bounds of &buf2.
129
130 c) Static initialization.
131
132 The special case of pointer store is static pointer initialization.
133 Bounds initialization is performed in a few steps:
134 - register all static initializations in front-end using
135 chkp_register_var_initializer
136 - when file compilation finishes we create functions with special
137 attribute 'chkp ctor' and put explicit initialization code
138 (assignments) for all statically initialized pointers.
139 - when checker constructor is compiled checker pass adds required
140 bounds initialization for all statically initialized pointers
141 - since we do not actually need excess pointers initialization
142 in checker constructor we remove such assignments from them
143
144 d) Calls.
145
146 For each call in the code we add additional arguments to pass
147 bounds for pointer arguments. We determine type of call arguments
148 using arguments list from function declaration; if function
149 declaration is not available we use function type; otherwise
150 (e.g. for unnamed arguments) we use type of passed value. Function
151 declaration/type is replaced with the instrumented one.
152
153 Example:
154
155 val_1 = foo (&buf1, &buf2, &buf1, 0);
156
157 is translated into:
158
159 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
160 &buf1, __bound_tmp.1_2, 0);
161
162 e) Returns.
163
164 If function returns a pointer value we have to return bounds also.
165 A new operand was added for return statement to hold returned bounds.
166
167 Example:
168
169 return &_buf1;
170
171 is transformed into
172
173 return &_buf1, __bound_tmp.1_1;
174
175 3. Bounds computation.
176
177 Compiler is fully responsible for computing bounds to be used for each
178 memory access. The first step for bounds computation is to find the
179 origin of the pointer dereferenced for memory access. Based on the pointer
180 origin we define a way to compute its bounds. There are just few
181 possible cases:
182
183 a) Pointer is returned by call.
184
185 In this case we use corresponding checker builtin method to obtain returned
186 bounds.
187
188 Example:
189
190 buf_1 = malloc (size_2);
191 foo (buf_1);
192
193 is translated into:
194
195 buf_1 = malloc (size_2);
196 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
197 foo (buf_1, __bound_tmp.1_3);
198
199 b) Pointer is an address of an object.
200
201 In this case compiler tries to compute objects size and create corresponding
202 bounds. If object has incomplete type then special checker builtin is used to
203 obtain its size at runtime.
204
205 Example:
206
207 foo ()
208 {
209 <unnamed type> __bound_tmp.3;
210 static int buf[100];
211
212 <bb 3>:
213 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
214
215 <bb 2>:
216 return &buf, __bound_tmp.3_2;
217 }
218
219 Example:
220
221 Address of an object 'extern int buf[]' with incomplete type is
222 returned.
223
224 foo ()
225 {
226 <unnamed type> __bound_tmp.4;
227 long unsigned int __size_tmp.3;
228
229 <bb 3>:
230 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
231 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
232
233 <bb 2>:
234 return &buf, __bound_tmp.4_3;
235 }
236
237 c) Pointer is the result of object narrowing.
238
239 It happens when we use pointer to an object to compute pointer to a part
240 of an object. E.g. we take pointer to a field of a structure. In this
241 case we perform bounds intersection using bounds of original object and
242 bounds of object's part (which are computed basing on its type).
243
244 There may be some debatable questions about when narrowing should occur
245 and when it should not. To avoid false bound violations in correct
246 programs we do not perform narrowing when address of an array element is
247 obtained (it has address of the whole array) and when address of the first
248 structure field is obtained (because it is guaranteed to be equal to
249 address of the whole structure and it is legal to cast it back to structure).
250
251 Default narrowing behavior may be changed using compiler flags.
252
253 Example:
254
255 In this example address of the second structure field is returned.
256
257 foo (struct A * p, __bounds_type __bounds_of_p)
258 {
259 <unnamed type> __bound_tmp.3;
260 int * _2;
261 int * _5;
262
263 <bb 2>:
264 _5 = &p_1(D)->second_field;
265 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
266 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
267 __bounds_of_p_3(D));
268 _2 = &p_1(D)->second_field;
269 return _2, __bound_tmp.3_8;
270 }
271
272 Example:
273
274 In this example address of the first field of array element is returned.
275
276 foo (struct A * p, __bounds_type __bounds_of_p, int i)
277 {
278 long unsigned int _3;
279 long unsigned int _4;
280 struct A * _6;
281 int * _7;
282
283 <bb 2>:
284 _3 = (long unsigned int) i_1(D);
285 _4 = _3 * 8;
286 _6 = p_5(D) + _4;
287 _7 = &_6->first_field;
288 return _7, __bounds_of_p_2(D);
289 }
290
291
292 d) Pointer is the result of pointer arithmetic or type cast.
293
294 In this case bounds of the base pointer are used. In case of binary
295 operation producing a pointer we are analyzing data flow further
296 looking for operand's bounds. One operand is considered as a base
297 if it has some valid bounds. If we fall into a case when none of
298 operands (or both of them) has valid bounds, a default bounds value
299 is used.
300
301 Trying to find out bounds for binary operations we may fall into
302 cyclic dependencies for pointers. To avoid infinite recursion all
303 walked phi nodes instantly obtain corresponding bounds but created
304 bounds are marked as incomplete. It helps us to stop DF walk during
305 bounds search.
306
307 When we reach pointer source, some args of incomplete bounds phi obtain
308 valid bounds and those values are propagated further through phi nodes.
309 If no valid bounds were found for phi node then we mark its result as
310 invalid bounds. Process stops when all incomplete bounds become either
311 valid or invalid and we are able to choose a pointer base.
312
313 e) Pointer is loaded from the memory.
314
315 In this case we just need to load bounds from the bounds table.
316
317 Example:
318
319 foo ()
320 {
321 <unnamed type> __bound_tmp.3;
322 static int * buf;
323 int * _2;
324
325 <bb 2>:
326 _2 = buf;
327 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
328 return _2, __bound_tmp.3_4;
329 }
330
331 */
332
333 typedef void (*assign_handler)(tree, tree, void *);
334
335 static tree chkp_get_zero_bounds ();
336 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
337 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
338 gimple_stmt_iterator *iter);
339 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
340 tree *elt, bool *safe,
341 bool *bitfield,
342 tree *bounds,
343 gimple_stmt_iterator *iter,
344 bool innermost_bounds);
345
346 #define chkp_bndldx_fndecl \
347 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
348 #define chkp_bndstx_fndecl \
349 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
350 #define chkp_checkl_fndecl \
351 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
352 #define chkp_checku_fndecl \
353 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
354 #define chkp_bndmk_fndecl \
355 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
356 #define chkp_ret_bnd_fndecl \
357 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
358 #define chkp_intersect_fndecl \
359 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
360 #define chkp_narrow_bounds_fndecl \
361 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
362 #define chkp_sizeof_fndecl \
363 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
364 #define chkp_extract_lower_fndecl \
365 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
366 #define chkp_extract_upper_fndecl \
367 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
368
369 static GTY (()) tree chkp_uintptr_type;
370
371 static GTY (()) tree chkp_zero_bounds_var;
372 static GTY (()) tree chkp_none_bounds_var;
373
374 static GTY (()) basic_block entry_block;
375 static GTY (()) tree zero_bounds;
376 static GTY (()) tree none_bounds;
377 static GTY (()) tree incomplete_bounds;
378 static GTY (()) tree tmp_var;
379 static GTY (()) tree size_tmp_var;
380 static GTY (()) bitmap chkp_abnormal_copies;
381
382 struct hash_set<tree> *chkp_invalid_bounds;
383 struct hash_set<tree> *chkp_completed_bounds_set;
384 struct hash_map<tree, tree> *chkp_reg_bounds;
385 struct hash_map<tree, tree> *chkp_bound_vars;
386 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
387 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
388 struct hash_map<tree, tree> *chkp_bounds_map;
389 struct hash_map<tree, tree> *chkp_static_var_bounds;
390
391 static bool in_chkp_pass;
392
393 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
394 #define CHKP_SIZE_TMP_NAME "__size_tmp"
395 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
396 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
397 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
398 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
399 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
400
401 /* Static checker constructors may become very large and their
402 compilation with optimization may take too much time.
403 Therefore we put a limit to number of statements in one
404 constructor. Tests with 100 000 statically initialized
405 pointers showed following compilation times on Sandy Bridge
406 server (used -O2):
407 limit 100 => ~18 sec.
408 limit 300 => ~22 sec.
409 limit 1000 => ~30 sec.
410 limit 3000 => ~49 sec.
411 limit 5000 => ~55 sec.
412 limit 10000 => ~76 sec.
413 limit 100000 => ~532 sec. */
414 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
415
416 struct chkp_ctor_stmt_list
417 {
418 tree stmts;
419 int avail;
420 };
421
422 /* Return 1 if function FNDECL is instrumented by Pointer
423 Bounds Checker. */
424 bool
425 chkp_function_instrumented_p (tree fndecl)
426 {
427 return fndecl
428 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
429 }
430
431 /* Mark function FNDECL as instrumented. */
432 void
433 chkp_function_mark_instrumented (tree fndecl)
434 {
435 if (chkp_function_instrumented_p (fndecl))
436 return;
437
438 DECL_ATTRIBUTES (fndecl)
439 = tree_cons (get_identifier ("chkp instrumented"), NULL,
440 DECL_ATTRIBUTES (fndecl));
441 }
442
443 /* Return true when STMT is builtin call to instrumentation function
444 corresponding to CODE. */
445
446 bool
447 chkp_gimple_call_builtin_p (gimple call,
448 enum built_in_function code)
449 {
450 tree fndecl;
451 if (is_gimple_call (call)
452 && (fndecl = targetm.builtin_chkp_function (code))
453 && gimple_call_fndecl (call) == fndecl)
454 return true;
455 return false;
456 }
457
/* Emit code to store zero bounds for PTR located at MEM.
   Runs at expand time: builds the bounds store as trees and
   immediately expands it to RTL.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Zero bounds either come from a preallocated static const
     variable or are created with an explicit bndmk (0, 0) call.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary holding the zero bounds value to be stored.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* Address of the pointer slot the bounds are associated with.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* Assign zero bounds to the temporary first, then store them
     into the bounds table for ADDR/PTR.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
478
/* Mark statement S to not be instrumented.  Uses pass-local
   flag GF_PLF_1 on the statement.  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
485
/* Mark statement S to be instrumented (clear pass-local
   flag GF_PLF_1).  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
492
/* Return 1 if statement S should not be instrumented
   (pass-local flag GF_PLF_1 is set).  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
499
500 /* Get var to be used for bound temps. */
501 static tree
502 chkp_get_tmp_var (void)
503 {
504 if (!tmp_var)
505 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
506
507 return tmp_var;
508 }
509
510 /* Get SSA_NAME to be used as temp. */
511 static tree
512 chkp_get_tmp_reg (gimple stmt)
513 {
514 if (in_chkp_pass)
515 return make_ssa_name (chkp_get_tmp_var (), stmt);
516
517 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
518 CHKP_BOUND_TMP_NAME);
519 }
520
521 /* Get var to be used for size temps. */
522 static tree
523 chkp_get_size_tmp_var (void)
524 {
525 if (!size_tmp_var)
526 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
527
528 return size_tmp_var;
529 }
530
531 /* Register bounds BND for address of OBJ. */
532 static void
533 chkp_register_addr_bounds (tree obj, tree bnd)
534 {
535 if (bnd == incomplete_bounds)
536 return;
537
538 chkp_reg_addr_bounds->put (obj, bnd);
539
540 if (dump_file && (dump_flags & TDF_DETAILS))
541 {
542 fprintf (dump_file, "Regsitered bound ");
543 print_generic_expr (dump_file, bnd, 0);
544 fprintf (dump_file, " for address of ");
545 print_generic_expr (dump_file, obj, 0);
546 fprintf (dump_file, "\n");
547 }
548 }
549
550 /* Return bounds registered for address of OBJ. */
551 static tree
552 chkp_get_registered_addr_bounds (tree obj)
553 {
554 tree *slot = chkp_reg_addr_bounds->get (obj);
555 return slot ? *slot : NULL_TREE;
556 }
557
558 /* Mark BOUNDS as completed. */
559 static void
560 chkp_mark_completed_bounds (tree bounds)
561 {
562 chkp_completed_bounds_set->add (bounds);
563
564 if (dump_file && (dump_flags & TDF_DETAILS))
565 {
566 fprintf (dump_file, "Marked bounds ");
567 print_generic_expr (dump_file, bounds, 0);
568 fprintf (dump_file, " as completed\n");
569 }
570 }
571
/* Return 1 if BOUNDS were marked as completed and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
578
/* Clear completed bound marks by replacing the set with a fresh
   empty one.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
586
587 /* Mark BOUNDS associated with PTR as incomplete. */
588 static void
589 chkp_register_incomplete_bounds (tree bounds, tree ptr)
590 {
591 chkp_incomplete_bounds_map->put (bounds, ptr);
592
593 if (dump_file && (dump_flags & TDF_DETAILS))
594 {
595 fprintf (dump_file, "Regsitered incomplete bounds ");
596 print_generic_expr (dump_file, bounds, 0);
597 fprintf (dump_file, " for ");
598 print_generic_expr (dump_file, ptr, 0);
599 fprintf (dump_file, "\n");
600 }
601 }
602
603 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
604 static bool
605 chkp_incomplete_bounds (tree bounds)
606 {
607 if (bounds == incomplete_bounds)
608 return true;
609
610 if (chkp_completed_bounds (bounds))
611 return false;
612
613 return chkp_incomplete_bounds_map->get (bounds) != NULL;
614 }
615
/* Clear incomplete bound marks by replacing the map with a fresh
   empty one.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
623
624 /* Build and return bndmk call which creates bounds for structure
625 pointed by PTR. Structure should have complete type. */
626 tree
627 chkp_make_bounds_for_struct_addr (tree ptr)
628 {
629 tree type = TREE_TYPE (ptr);
630 tree size;
631
632 gcc_assert (POINTER_TYPE_P (type));
633
634 size = TYPE_SIZE (TREE_TYPE (type));
635
636 gcc_assert (size);
637
638 return build_call_nary (pointer_bounds_type_node,
639 build_fold_addr_expr (chkp_bndmk_fndecl),
640 2, ptr, size);
641 }
642
643 /* Traversal function for chkp_may_finish_incomplete_bounds.
644 Set RES to 0 if at least one argument of phi statement
645 defining bounds (passed in KEY arg) is unknown.
646 Traversal stops when first unknown phi argument is found. */
647 bool
648 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
649 bool *res)
650 {
651 gimple phi;
652 unsigned i;
653
654 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
655
656 phi = SSA_NAME_DEF_STMT (bounds);
657
658 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
659
660 for (i = 0; i < gimple_phi_num_args (phi); i++)
661 {
662 tree phi_arg = gimple_phi_arg_def (phi, i);
663 if (!phi_arg)
664 {
665 *res = false;
666 /* Do not need to traverse further. */
667 return false;
668 }
669 }
670
671 return true;
672 }
673
/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  Traverses the incomplete-bounds map with
   chkp_may_complete_phi_bounds, which clears RES on the first
   unknown phi argument.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
686
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  BOUNDS is the bounds value
   computed for pointer *SLOT; both must be SSA names defined by
   phi nodes.  Returns true to continue map traversal.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gimple bounds_phi;
  gimple ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = SSA_NAME_DEF_STMT (bounds);
  ptr_phi = SSA_NAME_DEF_STMT (ptr);

  gcc_assert (bounds_phi && gimple_code (bounds_phi) == GIMPLE_PHI);
  gcc_assert (ptr_phi && gimple_code (ptr_phi) == GIMPLE_PHI);

  /* For each argument of the pointer's phi, find (or build) bounds
     and add them to the bounds phi on the corresponding edge.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
719
720 /* Mark BOUNDS as invalid. */
721 static void
722 chkp_mark_invalid_bounds (tree bounds)
723 {
724 chkp_invalid_bounds->add (bounds);
725
726 if (dump_file && (dump_flags & TDF_DETAILS))
727 {
728 fprintf (dump_file, "Marked bounds ");
729 print_generic_expr (dump_file, bounds, 0);
730 fprintf (dump_file, " as invalid\n");
731 }
732 }
733
734 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
735 static bool
736 chkp_valid_bounds (tree bounds)
737 {
738 if (bounds == zero_bounds || bounds == none_bounds)
739 return false;
740
741 return !chkp_invalid_bounds->contains (bounds);
742 }
743
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  Sets *RES when progress was made; always
   returns true to continue map traversal.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Nothing to do if these bounds were already completed.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* A single valid, complete argument is enough to consider
	 the phi's bounds valid; complete them and recompute args.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
782
783 /* Helper function for chkp_finish_incomplete_bounds.
784 Marks all incompleted bounds as invalid. */
785 bool
786 chkp_mark_invalid_bounds_walker (tree const &bounds,
787 tree *slot ATTRIBUTE_UNUSED,
788 void *res ATTRIBUTE_UNUSED)
789 {
790 if (!chkp_completed_bounds (bounds))
791 {
792 chkp_mark_invalid_bounds (bounds);
793 chkp_mark_completed_bounds (bounds);
794 }
795 return true;
796 }
797
798 /* When all bound phi nodes have all their args computed
799 we have enough info to find valid bounds. We iterate
800 through all incompleted bounds searching for valid
801 bounds. Found valid bounds are marked as completed
802 and all remaining incompleted bounds are recomputed.
803 Process continues until no new valid bounds may be
804 found. All remained incompleted bounds are marked as
805 invalid (i.e. have no valid source of bounds). */
806 static void
807 chkp_finish_incomplete_bounds (void)
808 {
809 bool found_valid;
810
811 while (found_valid)
812 {
813 found_valid = false;
814
815 chkp_incomplete_bounds_map->
816 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
817
818 if (found_valid)
819 chkp_incomplete_bounds_map->
820 traverse<void *, chkp_recompute_phi_bounds> (NULL);
821 }
822
823 chkp_incomplete_bounds_map->
824 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
825 chkp_incomplete_bounds_map->
826 traverse<void *, chkp_recompute_phi_bounds> (NULL);
827
828 chkp_erase_completed_bounds ();
829 chkp_erase_incomplete_bounds ();
830 }
831
832 /* Return 1 if type TYPE is a pointer type or a
833 structure having a pointer type as one of its fields.
834 Otherwise return 0. */
835 bool
836 chkp_type_has_pointer (const_tree type)
837 {
838 bool res = false;
839
840 if (BOUNDED_TYPE_P (type))
841 res = true;
842 else if (RECORD_OR_UNION_TYPE_P (type))
843 {
844 tree field;
845
846 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
847 if (TREE_CODE (field) == FIELD_DECL)
848 res = res || chkp_type_has_pointer (TREE_TYPE (field));
849 }
850 else if (TREE_CODE (type) == ARRAY_TYPE)
851 res = chkp_type_has_pointer (TREE_TYPE (type));
852
853 return res;
854 }
855
/* Return the number of bounds needed for TYPE: 1 for a pointer
   type, the number of pointer-holding slots for a record/union,
   and 0 otherwise (including a NULL type).  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* Collect bound slots into a bitmap and count its bits;
	 the bitmap obstack is created and released locally.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
879
880 /* Get bounds associated with NODE via
881 chkp_set_bounds call. */
882 tree
883 chkp_get_bounds (tree node)
884 {
885 tree *slot;
886
887 if (!chkp_bounds_map)
888 return NULL_TREE;
889
890 slot = chkp_bounds_map->get (node);
891 return slot ? *slot : NULL_TREE;
892 }
893
894 /* Associate bounds VAL with NODE. */
895 void
896 chkp_set_bounds (tree node, tree val)
897 {
898 if (!chkp_bounds_map)
899 chkp_bounds_map = new hash_map<tree, tree>;
900
901 chkp_bounds_map->put (node, val);
902 }
903
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into the
   bounds initialization list then 1 is returned.  Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when checking is disabled or the initializer
     is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need
     bounds initialization; flag the varpool node so the checker
     constructor picks it up later.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
927
/* Helper function for chkp_finish_file.

   Add new modification statement (RHS is assigned to LHS)
   into list of static initializer statements (passed in ARG).
   Note: this function only decrements the available-statement
   counter; flushing a full list into a checker constructor is
   presumably done by the caller — confirm against chkp_finish_file.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert a conversion when LHS and RHS types do not match
     directly.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}
950
951 /* Build and return ADDR_EXPR for specified object OBJ. */
952 static tree
953 chkp_build_addr_expr (tree obj)
954 {
955 return TREE_CODE (obj) == TARGET_MEM_REF
956 ? tree_mem_ref_addr (ptr_type_node, obj)
957 : build_fold_addr_expr (obj);
958 }
959
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constant: lower bound is its address; size excludes
	 the trailing NUL (TREE_STRING_LENGTH - 1).  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Size is obtained at runtime via the chkp sizeof builtin.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* A runtime size of zero is treated as "infinite":
	     use (0 - lb) so that lb + size wraps to the maximum
	     representable upper bound.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last valid byte.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Flush the current statements list into a static constructor
     once the per-ctor statement budget is exhausted.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1017
1018 /* Return entry block to be used for checker initilization code.
1019 Create new block if required. */
1020 static basic_block
1021 chkp_get_entry_block (void)
1022 {
1023 if (!entry_block)
1024 entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
1025
1026 return entry_block;
1027 }
1028
1029 /* Return a bounds var to be used for pointer var PTR_VAR. */
1030 static tree
1031 chkp_get_bounds_var (tree ptr_var)
1032 {
1033 tree bnd_var;
1034 tree *slot;
1035
1036 slot = chkp_bound_vars->get (ptr_var);
1037 if (slot)
1038 bnd_var = *slot;
1039 else
1040 {
1041 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1042 CHKP_BOUND_TMP_NAME);
1043 chkp_bound_vars->put (ptr_var, bnd_var);
1044 }
1045
1046 return bnd_var;
1047 }
1048
1049
1050
1051 /* Register bounds BND for object PTR in global bounds table.
1052 A copy of bounds may be created for abnormal ssa names.
1053 Returns bounds to use for PTR. */
1054 static tree
1055 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1056 {
1057 bool abnormal_ptr;
1058
1059 if (!chkp_reg_bounds)
1060 return bnd;
1061
1062 /* Do nothing if bounds are incomplete_bounds
1063 because it means bounds will be recomputed. */
1064 if (bnd == incomplete_bounds)
1065 return bnd;
1066
1067 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1068 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1069 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1070
1071 /* A single bounds value may be reused multiple times for
1072 different pointer values. It may cause coalescing issues
1073 for abnormal SSA names. To avoid it we create a bounds
1074 copy in case it is computed for abnormal SSA name.
1075
1076 We also cannot reuse such created copies for other pointers */
1077 if (abnormal_ptr
1078 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1079 {
1080 tree bnd_var = NULL_TREE;
1081
1082 if (abnormal_ptr)
1083 {
1084 if (SSA_NAME_VAR (ptr))
1085 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1086 }
1087 else
1088 bnd_var = chkp_get_tmp_var ();
1089
1090 /* For abnormal copies we may just find original
1091 bounds and use them. */
1092 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1093 {
1094 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1095 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1096 bnd = gimple_assign_rhs1 (bnd_def);
1097 }
1098 /* For undefined values we usually use none bounds
1099 value but in case of abnormal edge it may cause
1100 coalescing failures. Use default definition of
1101 bounds variable instead to avoid it. */
1102 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1103 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1104 {
1105 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1106
1107 if (dump_file && (dump_flags & TDF_DETAILS))
1108 {
1109 fprintf (dump_file, "Using default def bounds ");
1110 print_generic_expr (dump_file, bnd, 0);
1111 fprintf (dump_file, " for abnormal default def SSA name ");
1112 print_generic_expr (dump_file, ptr, 0);
1113 fprintf (dump_file, "\n");
1114 }
1115 }
1116 else
1117 {
1118 tree copy;
1119 gimple def = SSA_NAME_DEF_STMT (ptr);
1120 gimple assign;
1121 gimple_stmt_iterator gsi;
1122
1123 if (bnd_var)
1124 copy = make_ssa_name (bnd_var, gimple_build_nop ());
1125 else
1126 copy = make_temp_ssa_name (pointer_bounds_type_node,
1127 gimple_build_nop (),
1128 CHKP_BOUND_TMP_NAME);
1129 assign = gimple_build_assign (copy, bnd);
1130
1131 if (dump_file && (dump_flags & TDF_DETAILS))
1132 {
1133 fprintf (dump_file, "Creating a copy of bounds ");
1134 print_generic_expr (dump_file, bnd, 0);
1135 fprintf (dump_file, " for abnormal SSA name ");
1136 print_generic_expr (dump_file, ptr, 0);
1137 fprintf (dump_file, "\n");
1138 }
1139
1140 if (gimple_code (def) == GIMPLE_NOP)
1141 {
1142 gsi = gsi_last_bb (chkp_get_entry_block ());
1143 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1144 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1145 else
1146 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1147 }
1148 else
1149 {
1150 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1151 /* Sometimes (e.g. when we load a pointer from a
1152 memory) bounds are produced later than a pointer.
1153 We need to insert bounds copy appropriately. */
1154 if (gimple_code (bnd_def) != GIMPLE_NOP
1155 && stmt_dominates_stmt_p (def, bnd_def))
1156 gsi = gsi_for_stmt (bnd_def);
1157 else
1158 gsi = gsi_for_stmt (def);
1159 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1160 }
1161
1162 bnd = copy;
1163 }
1164
1165 if (abnormal_ptr)
1166 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1167 }
1168
1169 chkp_reg_bounds->put (ptr, bnd);
1170
1171 if (dump_file && (dump_flags & TDF_DETAILS))
1172 {
1173 fprintf (dump_file, "Regsitered bound ");
1174 print_generic_expr (dump_file, bnd, 0);
1175 fprintf (dump_file, " for pointer ");
1176 print_generic_expr (dump_file, ptr, 0);
1177 fprintf (dump_file, "\n");
1178 }
1179
1180 return bnd;
1181 }
1182
1183 /* Get bounds registered for object PTR in global bounds table. */
1184 static tree
1185 chkp_get_registered_bounds (tree ptr)
1186 {
1187 tree *slot;
1188
1189 if (!chkp_reg_bounds)
1190 return NULL_TREE;
1191
1192 slot = chkp_reg_bounds->get (ptr);
1193 return slot ? *slot : NULL_TREE;
1194 }
1195
1196 /* Add bound retvals to return statement pointed by GSI. */
1197
1198 static void
1199 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1200 {
1201 gimple ret = gsi_stmt (*gsi);
1202 tree retval = gimple_return_retval (ret);
1203 tree ret_decl = DECL_RESULT (cfun->decl);
1204 tree bounds;
1205
1206 if (!retval)
1207 return;
1208
1209 if (BOUNDED_P (ret_decl))
1210 {
1211 bounds = chkp_find_bounds (retval, gsi);
1212 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1213 gimple_return_set_retbnd (ret, bounds);
1214 }
1215
1216 update_stmt (ret);
1217 }
1218
1219 /* Force OP to be suitable for using as an argument for call.
1220 New statements (if any) go to SEQ. */
1221 static tree
1222 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1223 {
1224 gimple_seq stmts;
1225 gimple_stmt_iterator si;
1226
1227 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1228
1229 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1230 chkp_mark_stmt (gsi_stmt (si));
1231
1232 gimple_seq_add_seq (seq, stmts);
1233
1234 return op;
1235 }
1236
1237 /* Generate lower bound check for memory access by ADDR.
1238 Check is inserted before the position pointed by ITER.
1239 DIRFLAG indicates whether memory access is load or store. */
1240 static void
1241 chkp_check_lower (tree addr, tree bounds,
1242 gimple_stmt_iterator iter,
1243 location_t location,
1244 tree dirflag)
1245 {
1246 gimple_seq seq;
1247 gimple check;
1248 tree node;
1249
1250 if (bounds == chkp_get_zero_bounds ())
1251 return;
1252
1253 if (dirflag == integer_zero_node
1254 && !flag_chkp_check_read)
1255 return;
1256
1257 if (dirflag == integer_one_node
1258 && !flag_chkp_check_write)
1259 return;
1260
1261 seq = NULL;
1262
1263 node = chkp_force_gimple_call_op (addr, &seq);
1264
1265 check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
1266 chkp_mark_stmt (check);
1267 gimple_call_set_with_bounds (check, true);
1268 gimple_set_location (check, location);
1269 gimple_seq_add_stmt (&seq, check);
1270
1271 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1272
1273 if (dump_file && (dump_flags & TDF_DETAILS))
1274 {
1275 gimple before = gsi_stmt (iter);
1276 fprintf (dump_file, "Generated lower bound check for statement ");
1277 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1278 fprintf (dump_file, " ");
1279 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1280 }
1281 }
1282
1283 /* Generate upper bound check for memory access by ADDR.
1284 Check is inserted before the position pointed by ITER.
1285 DIRFLAG indicates whether memory access is load or store. */
1286 static void
1287 chkp_check_upper (tree addr, tree bounds,
1288 gimple_stmt_iterator iter,
1289 location_t location,
1290 tree dirflag)
1291 {
1292 gimple_seq seq;
1293 gimple check;
1294 tree node;
1295
1296 if (bounds == chkp_get_zero_bounds ())
1297 return;
1298
1299 if (dirflag == integer_zero_node
1300 && !flag_chkp_check_read)
1301 return;
1302
1303 if (dirflag == integer_one_node
1304 && !flag_chkp_check_write)
1305 return;
1306
1307 seq = NULL;
1308
1309 node = chkp_force_gimple_call_op (addr, &seq);
1310
1311 check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
1312 chkp_mark_stmt (check);
1313 gimple_call_set_with_bounds (check, true);
1314 gimple_set_location (check, location);
1315 gimple_seq_add_stmt (&seq, check);
1316
1317 gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);
1318
1319 if (dump_file && (dump_flags & TDF_DETAILS))
1320 {
1321 gimple before = gsi_stmt (iter);
1322 fprintf (dump_file, "Generated upper bound check for statement ");
1323 print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
1324 fprintf (dump_file, " ");
1325 print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
1326 }
1327 }
1328
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* Lower bound is checked against the first accessed byte,
     upper bound against the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1342
1343 /* Replace call to _bnd_chk_* pointed by GSI with
1344 bndcu and bndcl calls. DIRFLAG determines whether
1345 check is for read or write. */
1346
1347 void
1348 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1349 tree dirflag)
1350 {
1351 gimple_stmt_iterator call_iter = *gsi;
1352 gimple call = gsi_stmt (*gsi);
1353 tree fndecl = gimple_call_fndecl (call);
1354 tree addr = gimple_call_arg (call, 0);
1355 tree bounds = chkp_find_bounds (addr, gsi);
1356
1357 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1358 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1359 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1360
1361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1362 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1363
1364 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1365 {
1366 tree size = gimple_call_arg (call, 1);
1367 addr = fold_build_pointer_plus (addr, size);
1368 addr = fold_build_pointer_plus_hwi (addr, -1);
1369 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1370 }
1371
1372 gsi_remove (&call_iter, true);
1373 }
1374
1375 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1376 corresponding bounds extract call. */
1377
1378 void
1379 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1380 {
1381 gimple call = gsi_stmt (*gsi);
1382 tree fndecl = gimple_call_fndecl (call);
1383 tree addr = gimple_call_arg (call, 0);
1384 tree bounds = chkp_find_bounds (addr, gsi);
1385 gimple extract;
1386
1387 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1388 fndecl = chkp_extract_lower_fndecl;
1389 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1390 fndecl = chkp_extract_upper_fndecl;
1391 else
1392 gcc_unreachable ();
1393
1394 extract = gimple_build_call (fndecl, 1, bounds);
1395 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1396 chkp_mark_stmt (extract);
1397
1398 gsi_replace (gsi, extract, false);
1399 }
1400
1401 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1402 static tree
1403 chkp_build_component_ref (tree obj, tree field)
1404 {
1405 tree res;
1406
1407 /* If object is TMR then we do not use component_ref but
1408 add offset instead. We need it to be able to get addr
1409 of the reasult later. */
1410 if (TREE_CODE (obj) == TARGET_MEM_REF)
1411 {
1412 tree offs = TMR_OFFSET (obj);
1413 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1414 offs, DECL_FIELD_OFFSET (field));
1415
1416 gcc_assert (offs);
1417
1418 res = copy_node (obj);
1419 TREE_TYPE (res) = TREE_TYPE (field);
1420 TMR_OFFSET (res) = offs;
1421 }
1422 else
1423 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1424
1425 return res;
1426 }
1427
1428 /* Return ARRAY_REF for array ARR and index IDX with
1429 specified element type ETYPE and element size ESIZE. */
1430 static tree
1431 chkp_build_array_ref (tree arr, tree etype, tree esize,
1432 unsigned HOST_WIDE_INT idx)
1433 {
1434 tree index = build_int_cst (size_type_node, idx);
1435 tree res;
1436
1437 /* If object is TMR then we do not use array_ref but
1438 add offset instead. We need it to be able to get addr
1439 of the reasult later. */
1440 if (TREE_CODE (arr) == TARGET_MEM_REF)
1441 {
1442 tree offs = TMR_OFFSET (arr);
1443
1444 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1445 esize, index);
1446 gcc_assert(esize);
1447
1448 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1449 offs, esize);
1450 gcc_assert (offs);
1451
1452 res = copy_node (arr);
1453 TREE_TYPE (res) = etype;
1454 TMR_OFFSET (res) = offs;
1455 }
1456 else
1457 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1458
1459 return res;
1460 }
1461
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* Pointer element: compute its bounds once and cache them
	 in the output slot keyed by pointer-slot index.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the element into a fresh SSA name before ITER and
	     search for bounds right after that load.  */
	  tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
	  gimple assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into every field, accumulating its bit offset
	 (DECL_FIELD_OFFSET is in bytes, hence the * 8).  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Nothing to do for arrays with no elements or with an
	 unknown upper bound.  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into each array element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
1533
1534 /* Fill HAVE_BOUND output bitmap with information about
1535 bounds requred for object of type TYPE.
1536
1537 OFFS is used for recursive calls and holds basic
1538 offset of TYPE in outer structure in bits.
1539
1540 HAVE_BOUND[i] is set to 1 if there is a field
1541 in TYPE which has pointer type and offset
1542 equal to i * POINTER_SIZE - OFFS in bits. */
1543 void
1544 chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
1545 HOST_WIDE_INT offs)
1546 {
1547 if (BOUNDED_TYPE_P (type))
1548 bitmap_set_bit (have_bound, offs / POINTER_SIZE);
1549 else if (RECORD_OR_UNION_TYPE_P (type))
1550 {
1551 tree field;
1552
1553 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1554 if (TREE_CODE (field) == FIELD_DECL)
1555 {
1556 HOST_WIDE_INT field_offs
1557 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1558 if (DECL_FIELD_OFFSET (field))
1559 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1560 chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
1561 offs + field_offs);
1562 }
1563 }
1564 else if (TREE_CODE (type) == ARRAY_TYPE)
1565 {
1566 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1567 tree etype = TREE_TYPE (type);
1568 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1569 unsigned HOST_WIDE_INT cur;
1570
1571 if (!maxval || integer_minus_onep (maxval))
1572 return;
1573
1574 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1575 chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
1576 }
1577 }
1578
1579 /* Fill bitmap RES with information about bounds for
1580 type TYPE. See chkp_find_bound_slots_1 for more
1581 details. */
1582 void
1583 chkp_find_bound_slots (const_tree type, bitmap res)
1584 {
1585 bitmap_clear (res);
1586 chkp_find_bound_slots_1 (type, res, 0);
1587 }
1588
1589 /* Return 1 if call to FNDECL should be instrumented
1590 and 0 otherwise. */
1591
1592 static bool
1593 chkp_instrument_normal_builtin (tree fndecl)
1594 {
1595 switch (DECL_FUNCTION_CODE (fndecl))
1596 {
1597 case BUILT_IN_STRLEN:
1598 case BUILT_IN_STRCPY:
1599 case BUILT_IN_STRNCPY:
1600 case BUILT_IN_STPCPY:
1601 case BUILT_IN_STPNCPY:
1602 case BUILT_IN_STRCAT:
1603 case BUILT_IN_STRNCAT:
1604 case BUILT_IN_MEMCPY:
1605 case BUILT_IN_MEMPCPY:
1606 case BUILT_IN_MEMSET:
1607 case BUILT_IN_MEMMOVE:
1608 case BUILT_IN_BZERO:
1609 case BUILT_IN_STRCMP:
1610 case BUILT_IN_STRNCMP:
1611 case BUILT_IN_BCMP:
1612 case BUILT_IN_MEMCMP:
1613 case BUILT_IN_MEMCPY_CHK:
1614 case BUILT_IN_MEMPCPY_CHK:
1615 case BUILT_IN_MEMMOVE_CHK:
1616 case BUILT_IN_MEMSET_CHK:
1617 case BUILT_IN_STRCPY_CHK:
1618 case BUILT_IN_STRNCPY_CHK:
1619 case BUILT_IN_STPCPY_CHK:
1620 case BUILT_IN_STPNCPY_CHK:
1621 case BUILT_IN_STRCAT_CHK:
1622 case BUILT_IN_STRNCAT_CHK:
1623 case BUILT_IN_MALLOC:
1624 case BUILT_IN_CALLOC:
1625 case BUILT_IN_REALLOC:
1626 return 1;
1627
1628 default:
1629 return 0;
1630 }
1631 }
1632
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gimple call = gsi_stmt (*gsi);
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gimple new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to legacy functions.  */
  if (fndecl
      && lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (fndecl)))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      /* integer_minus_one_node requests checks regardless of the
	 read/write direction flags.  */
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      /* Second argument becomes the bounds of the pointer
	 being narrowed.  */
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      /* always_inline builtins are instrumented only if an
	 instrumented clone with a body is available.  */
      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  ARG walks either the
	 DECL_ARGUMENTS chain (use_fntype == false) or the
	 TYPE_ARG_TYPES list (use_fntype == true); the actual
	 argument type is a fallback once the formal list is
	 exhausted or hits the trailing void_type_node.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A bounded (pointer) argument gets one bounds argument right
	 after it; an aggregate containing pointers gets one bounds
	 argument per found pointer slot.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, keep the original statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      /* gimple_op (call, 1) is the called function.  */
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA definitions from the old statement to the
	 new one before the replacement.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1850
1851 /* Return constant static bounds var with specified LB and UB
1852 if such var exists in varpool. Return NULL otherwise. */
1853 static tree
1854 chkp_find_const_bounds_var (HOST_WIDE_INT lb,
1855 HOST_WIDE_INT ub)
1856 {
1857 tree val = targetm.chkp_make_bounds_constant (lb, ub);
1858 struct varpool_node *node;
1859
1860 /* We expect bounds constant is represented as a complex value
1861 of two pointer sized integers. */
1862 gcc_assert (TREE_CODE (val) == COMPLEX_CST);
1863
1864 FOR_EACH_VARIABLE (node)
1865 if (POINTER_BOUNDS_P (node->decl)
1866 && TREE_READONLY (node->decl)
1867 && DECL_INITIAL (node->decl)
1868 && TREE_CODE (DECL_INITIAL (node->decl)) == COMPLEX_CST
1869 && tree_int_cst_equal (TREE_REALPART (DECL_INITIAL (node->decl)),
1870 TREE_REALPART (val))
1871 && tree_int_cst_equal (TREE_IMAGPART (DECL_INITIAL (node->decl)),
1872 TREE_IMAGPART (val)))
1873 return node->decl;
1874
1875 return NULL;
1876 }
1877
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree var;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  var = chkp_find_const_bounds_var (lb, ub);

  if (var)
    return var;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
		    get_identifier (name), pointer_bounds_type_node);

  /* Public read-only static: identical bounds constants can be
     shared across translation units via the comdat group below.  */
  TREE_PUBLIC (var) = 1;
  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_STATIC (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  if (!in_lto_p)
    {
      DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
      DECL_COMDAT (var) = 1;
      varpool_node::get_create (var)->set_comdat_group (DECL_ASSEMBLER_NAME (var));
      varpool_node::get_create (var)->force_output = 1;
    }
  else
    /* In LTO mode the definition is expected to come from
       another unit.  */
    DECL_EXTERNAL (var) = 1;
  varpool_node::finalize_decl (var);

  return var;
}
1920
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  /* Insert at the requested position or, by default, at the start
     of the checker entry block.  */
  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Both operands must be valid gimple call arguments; any
     statements needed to gimplify them go into SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build a bndmk call and mark it as checker-generated.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is honored only when an insertion point was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
1973
1974 /* Return var holding zero bounds. */
1975 tree
1976 chkp_get_zero_bounds_var (void)
1977 {
1978 if (!chkp_zero_bounds_var)
1979 {
1980 tree id = get_identifier (CHKP_ZERO_BOUNDS_VAR_NAME);
1981 symtab_node *node = symtab_node::get_for_asmname (id);
1982 if (node)
1983 chkp_zero_bounds_var = node->decl;
1984 }
1985
1986 if (!chkp_zero_bounds_var)
1987 chkp_zero_bounds_var
1988 = chkp_make_static_const_bounds (0, -1,
1989 CHKP_ZERO_BOUNDS_VAR_NAME);
1990 return chkp_zero_bounds_var;
1991 }
1992
1993 /* Return var holding none bounds. */
1994 tree
1995 chkp_get_none_bounds_var (void)
1996 {
1997 if (!chkp_none_bounds_var)
1998 {
1999 tree id = get_identifier (CHKP_NONE_BOUNDS_VAR_NAME);
2000 symtab_node *node = symtab_node::get_for_asmname (id);
2001 if (node)
2002 chkp_none_bounds_var = node->decl;
2003 }
2004
2005 if (!chkp_none_bounds_var)
2006 chkp_none_bounds_var
2007 = chkp_make_static_const_bounds (-1, 0,
2008 CHKP_NONE_BOUNDS_VAR_NAME);
2009 return chkp_none_bounds_var;
2010 }
2011
/* Return SSA_NAME used to represent zero bounds.  */
static tree
chkp_get_zero_bounds (void)
{
  /* Created once per function and cached.  */
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load the static zero bounds var in the checker
	 entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise emit a bndmk call at function entry.  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2040
/* Return SSA_NAME used to represent none bounds.  */
static tree
chkp_get_none_bounds (void)
{
  /* Created once per function and cached.  */
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load the static none bounds var in the checker
	 entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise emit a bndmk call at function entry.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2070
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
static tree
chkp_get_invalid_op_bounds (void)
{
  /* Zero bounds are used because no checks are emitted against
     them (see the early return in chkp_check_lower/chkp_check_upper).  */
  return chkp_get_zero_bounds ();
}
2078
/* Return bounds to be used for loads of non-pointer values.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  /* Zero bounds are used because no checks are emitted against
     them (see the early return in chkp_check_lower/chkp_check_upper).  */
  return chkp_get_zero_bounds ();
}
2085
/* Build bounds returned by CALL.  The result is registered for the
   call's LHS and also returned.  Several special cases (alloca,
   chkp builtins, calls returning one of their arguments) are
   recognized; the general case emits a chkp_ret_bnd builtin call
   right after CALL.  */
static tree
chkp_build_returned_bound (gimple call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);

  /* To avoid fixing alloca expands in targets we handle
     it separately.  Bounds are [lhs, lhs + size).  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if ((gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	   || gimple_call_builtin_p (call, BUILT_IN_STRCHR))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = 0, argno;
      if (gimple_call_return_flags (call) & ERF_RETURNS_ARG)
	retarg = gimple_call_return_flags (call) & ERF_RETURN_ARG_MASK;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) arguments; for an
	     instrumented call skip interleaved bounds args to find
	     the actual index of the returned argument.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2191
/* Return bounds used as returned by call
   which produced SSA name VAL.

   VAL must be defined by a GIMPLE_CALL; we then scan its
   immediate uses for a chkp_ret_bnd builtin call and return
   that statement, or NULL if none exists.  */
gimple
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return USE_STMT (use_p);

  return NULL;
}
2211
/* Check the next parameter for the given PARM is bounds
   and return its default SSA_NAME (create if required).  */
static tree
chkp_get_next_bounds_parm (tree parm)
{
  tree bounds = TREE_CHAIN (parm);
  gcc_assert (POINTER_BOUNDS_P (bounds));
  bounds = ssa_default_def (cfun, bounds);
  if (!bounds)
    {
      /* No default def yet — create one for the bounds PARM_DECL.  */
      bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
      set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
    }
  return bounds;
}
2227
/* Return bounds to be used for input argument PARM.
   PARM is an SSA name whose variable must be a PARM_DECL.
   Previously registered bounds are reused; otherwise bounds
   are derived from the following bounds parameter (for
   bounded pointers) or default to zero bounds.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Bounds for a bounded pointer arg live in the next
	     (bounds) parameter.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2291
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments.  (The original comment said "bndstx"; this function
   builds a bounds-load call.)  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as a bounds-aware call.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2305
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* Force operands into a form valid for a GIMPLE call,
     accumulating any needed statements into SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2340
2341 /* Build and return CALL_EXPR for bndstx builtin with specified
2342 arguments. */
2343 tree
2344 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2345 {
2346 tree fn = build1 (ADDR_EXPR,
2347 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2348 chkp_bndstx_fndecl);
2349 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2350 fn, 3, ptr, bounds, addr);
2351 CALL_WITH_BOUNDS_P (call) = true;
2352 return call;
2353 }
2354
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* Force operands into a form valid for a GIMPLE call.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Note builtin argument order: ptr, bounds, addr.  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2385
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.

   Bounds are selected per RHS code: loads read bounds from the
   bounds table, copies propagate bounds, pointer arithmetic picks
   the bounds of the pointer-valued operand, and non-pointer
   operations yield invalid-op (zero) bounds.  COND/MIN/MAX emit a
   COND_EXPR selecting between the operand bounds.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* Taking the address of the PARM_DECL above requires it
	     to be addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   NOTE: the braceless nested if/else chains below parse
	   with each `else' bound to the nearest `if', matching
	   the indentation.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime with the same condition.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
						 rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick the bounds of whichever operand is selected
	       by MIN/MAX at runtime.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign_with_ops (COND_EXPR, bounds,
						 cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2593
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gimple_stmt_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gimple stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on what kind of decl
	 the name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known non-zero
	       constant size to build bounds for it.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (def_stmt);
      break;

    case GIMPLE_PHI:
      /* Braceless nested if/else: each `else' binds to the
	 nearest `if', matching the indentation.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_stmt (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2706
2707 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2708 tree
2709 chkp_build_make_bounds_call (tree lower_bound, tree size)
2710 {
2711 tree call = build1 (ADDR_EXPR,
2712 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2713 chkp_bndmk_fndecl);
2714 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2715 call, 2, lower_bound, size);
2716 }
2717
2718 /* Create static bounds var of specfified OBJ which is
2719 is either VAR_DECL or string constant. */
2720 static tree
2721 chkp_make_static_bounds (tree obj)
2722 {
2723 static int string_id = 1;
2724 static int var_id = 1;
2725 tree *slot;
2726 const char *var_name;
2727 char *bnd_var_name;
2728 tree bnd_var;
2729
2730 /* First check if we already have required var. */
2731 if (chkp_static_var_bounds)
2732 {
2733 slot = chkp_static_var_bounds->get (obj);
2734 if (slot)
2735 return *slot;
2736 }
2737
2738 /* Build decl for bounds var. */
2739 if (TREE_CODE (obj) == VAR_DECL)
2740 {
2741 if (DECL_IGNORED_P (obj))
2742 {
2743 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2744 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2745 }
2746 else
2747 {
2748 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2749
2750 /* For hidden symbols we want to skip first '*' char. */
2751 if (*var_name == '*')
2752 var_name++;
2753
2754 bnd_var_name = (char *) xmalloc (strlen (var_name)
2755 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2756 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2757 strcat (bnd_var_name, var_name);
2758 }
2759
2760 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2761 get_identifier (bnd_var_name),
2762 pointer_bounds_type_node);
2763
2764 /* Address of the obj will be used as lower bound. */
2765 TREE_ADDRESSABLE (obj) = 1;
2766 }
2767 else
2768 {
2769 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2770 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2771
2772 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2773 get_identifier (bnd_var_name),
2774 pointer_bounds_type_node);
2775 }
2776
2777 TREE_PUBLIC (bnd_var) = 0;
2778 TREE_USED (bnd_var) = 1;
2779 TREE_READONLY (bnd_var) = 0;
2780 TREE_STATIC (bnd_var) = 1;
2781 TREE_ADDRESSABLE (bnd_var) = 0;
2782 DECL_ARTIFICIAL (bnd_var) = 1;
2783 DECL_COMMON (bnd_var) = 1;
2784 DECL_COMDAT (bnd_var) = 1;
2785 DECL_READ_P (bnd_var) = 1;
2786 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2787 /* Force output similar to constant bounds.
2788 See chkp_make_static_const_bounds. */
2789 varpool_node::get_create (bnd_var)->force_output = 1;
2790 /* Mark symbol as requiring bounds initialization. */
2791 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2792 varpool_node::finalize_decl (bnd_var);
2793
2794 /* Add created var to the map to use it for other references
2795 to obj. */
2796 if (!chkp_static_var_bounds)
2797 chkp_static_var_bounds = new hash_map<tree, tree>;
2798
2799 chkp_static_var_bounds->put (obj, bnd_var);
2800
2801 return bnd_var;
2802 }
2803
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.
   Generated statements are placed in the entry block.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Runtime call computing the object size; its result lands
     in SIZE_RELOC.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node, size_reloc, integer_zero_node);
      stmt = gimple_build_assign_with_ops (COND_EXPR, size,
					   cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
2864
2865 /* Return 1 if TYPE has fields with zero size or fields
2866 marked with chkp_variable_size attribute. */
2867 bool
2868 chkp_variable_size_type (tree type)
2869 {
2870 bool res = false;
2871 tree field;
2872
2873 if (RECORD_OR_UNION_TYPE_P (type))
2874 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2875 {
2876 if (TREE_CODE (field) == FIELD_DECL)
2877 res = res
2878 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
2879 || chkp_variable_size_type (TREE_TYPE (field));
2880 }
2881 else
2882 res = !TYPE_SIZE (type)
2883 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
2884 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
2885
2886 return res;
2887 }
2888
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Load bounds from the statically-initialized bounds var,
	 placing the load in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size unknown at compile time: compute it at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
2954
/* Compute and return bounds for constant string CST.
   Bounds are cached via the registered-bounds map.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically-initialized bounds var
	 in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
2992
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  NULL or zero bounds on either side short-circuit
   to the other operand without emitting code.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3043
3044 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3045 and 0 othersize. */
3046 static bool
3047 chkp_may_narrow_to_field (tree field)
3048 {
3049 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3050 && tree_to_uhwi (DECL_SIZE (field)) != 0
3051 && (!DECL_FIELD_OFFSET (field)
3052 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3053 && (!DECL_FIELD_BIT_OFFSET (field)
3054 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3055 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3056 && !chkp_variable_size_type (TREE_TYPE (field));
3057 }
3058
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  */
static bool
chkp_narrow_bounds_for_field (tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (field))
    return false;

  /* Accesses to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  /* Narrow either when the option forces it even for the first
     field, or when the field is not at offset zero.  */
  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
3083
3084 /* Perform narrowing for BOUNDS using bounds computed for field
3085 access COMPONENT. ITER meaning is the same as for
3086 chkp_intersect_bounds. */
3087 static tree
3088 chkp_narrow_bounds_to_field (tree bounds, tree component,
3089 gimple_stmt_iterator *iter)
3090 {
3091 tree field = TREE_OPERAND (component, 1);
3092 tree size = DECL_SIZE_UNIT (field);
3093 tree field_ptr = chkp_build_addr_expr (component);
3094 tree field_bounds;
3095
3096 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3097
3098 return chkp_intersect_bounds (field_bounds, bounds, iter);
3099 }
3100
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
3118 static void
3119 chkp_parse_array_and_component_ref (tree node, tree *ptr,
3120 tree *elt, bool *safe,
3121 bool *bitfield,
3122 tree *bounds,
3123 gimple_stmt_iterator *iter,
3124 bool innermost_bounds)
3125 {
3126 tree comp_to_narrow = NULL_TREE;
3127 tree last_comp = NULL_TREE;
3128 bool array_ref_found = false;
3129 tree *nodes;
3130 tree var;
3131 int len;
3132 int i;
3133
3134 /* Compute tree height for expression. */
3135 var = node;
3136 len = 1;
3137 while (TREE_CODE (var) == COMPONENT_REF
3138 || TREE_CODE (var) == ARRAY_REF
3139 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
3140 {
3141 var = TREE_OPERAND (var, 0);
3142 len++;
3143 }
3144
3145 gcc_assert (len > 1);
3146
  /* It is more convenient for us to scan left-to-right,
     so walk the tree again and put all nodes into the nodes
     vector in reversed order.  */
3150 nodes = XALLOCAVEC (tree, len);
3151 nodes[len - 1] = node;
3152 for (i = len - 2; i >= 0; i--)
3153 nodes[i] = TREE_OPERAND (nodes[i + 1], 0);
3154
3155 if (bounds)
3156 *bounds = NULL;
3157 *safe = true;
3158 *bitfield = (TREE_CODE (node) == COMPONENT_REF
3159 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need the outer element.  */
3161 if (*bitfield)
3162 *elt = nodes[len - 2];
3163 else
3164 *elt = NULL_TREE;
3165
3166 /* If we have indirection in expression then compute
3167 outermost structure bounds. Computed bounds may be
3168 narrowed later. */
3169 if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
3170 {
3171 *safe = false;
3172 *ptr = TREE_OPERAND (nodes[0], 0);
3173 if (bounds)
3174 *bounds = chkp_find_bounds (*ptr, iter);
3175 }
3176 else
3177 {
3178 gcc_assert (TREE_CODE (var) == VAR_DECL
3179 || TREE_CODE (var) == PARM_DECL
3180 || TREE_CODE (var) == RESULT_DECL
3181 || TREE_CODE (var) == STRING_CST
3182 || TREE_CODE (var) == SSA_NAME);
3183
3184 *ptr = chkp_build_addr_expr (var);
3185 }
3186
3187 /* In this loop we are trying to find a field access
3188 requiring narrowing. There are two simple rules
3189 for search:
3190 1. Leftmost array_ref is chosen if any.
3191 2. Rightmost suitable component_ref is chosen if innermost
3192 bounds are required and no array_ref exists. */
3193 for (i = 1; i < len; i++)
3194 {
3195 var = nodes[i];
3196
3197 if (TREE_CODE (var) == ARRAY_REF)
3198 {
3199 *safe = false;
3200 array_ref_found = true;
3201 if (flag_chkp_narrow_bounds
3202 && !flag_chkp_narrow_to_innermost_arrray
3203 && (!last_comp
3204 || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
3205 {
3206 comp_to_narrow = last_comp;
3207 break;
3208 }
3209 }
3210 else if (TREE_CODE (var) == COMPONENT_REF)
3211 {
3212 tree field = TREE_OPERAND (var, 1);
3213
3214 if (innermost_bounds
3215 && !array_ref_found
3216 && chkp_narrow_bounds_for_field (field))
3217 comp_to_narrow = var;
3218 last_comp = var;
3219
3220 if (flag_chkp_narrow_bounds
3221 && flag_chkp_narrow_to_innermost_arrray
3222 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
3223 {
3224 if (bounds)
3225 *bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
3226 comp_to_narrow = NULL;
3227 }
3228 }
3229 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
3230 /* Nothing to do for it. */
3231 ;
3232 else
3233 gcc_unreachable ();
3234 }
3235
3236 if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
3237 *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);
3238
3239 if (innermost_bounds && bounds && !*bounds)
3240 *bounds = chkp_find_bounds (*ptr, iter);
3241 }
3242
/* Compute and return bounds for address of OBJ.
   Dispatches on the tree code of OBJ; generated statements (if any)
   are inserted at position ITER.  Results are cached via
   chkp_register_addr_bounds for reuse.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  /* Reuse bounds previously computed for this address, if any.  */
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Ask for innermost bounds (last argument is true), so the
	   returned bounds are narrowed to the accessed component.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are not checked; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of the pointer being dereferenced.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* A complex part shares bounds with the whole complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result for subsequent queries on the same address.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3310
3311 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3312 to compute bounds if required. Computed bounds should be available at
3313 position pointed by ITER.
3314
3315 If PTR_SRC is NULL_TREE then pointer definition is identified.
3316
3317 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3318 PTR. If PTR is a any memory reference then ITER points to a statement
3319 after which bndldx will be inserterd. In both cases ITER will be updated
3320 to point to the inserted bndldx statement. */
3321
3322 static tree
3323 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3324 {
3325 tree addr = NULL_TREE;
3326 tree bounds = NULL_TREE;
3327
3328 if (!ptr_src)
3329 ptr_src = ptr;
3330
3331 bounds = chkp_get_registered_bounds (ptr_src);
3332
3333 if (bounds)
3334 return bounds;
3335
3336 switch (TREE_CODE (ptr_src))
3337 {
3338 case MEM_REF:
3339 case VAR_DECL:
3340 if (BOUNDED_P (ptr_src))
3341 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3342 bounds = chkp_get_zero_bounds ();
3343 else
3344 {
3345 addr = chkp_build_addr_expr (ptr_src);
3346 bounds = chkp_build_bndldx (addr, ptr, iter);
3347 }
3348 else
3349 bounds = chkp_get_nonpointer_load_bounds ();
3350 break;
3351
3352 case ARRAY_REF:
3353 case COMPONENT_REF:
3354 addr = get_base_address (ptr_src);
3355 if (DECL_P (addr)
3356 || TREE_CODE (addr) == MEM_REF
3357 || TREE_CODE (addr) == TARGET_MEM_REF)
3358 {
3359 if (BOUNDED_P (ptr_src))
3360 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3361 bounds = chkp_get_zero_bounds ();
3362 else
3363 {
3364 addr = chkp_build_addr_expr (ptr_src);
3365 bounds = chkp_build_bndldx (addr, ptr, iter);
3366 }
3367 else
3368 bounds = chkp_get_nonpointer_load_bounds ();
3369 }
3370 else
3371 {
3372 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3373 bounds = chkp_find_bounds (addr, iter);
3374 }
3375 break;
3376
3377 case PARM_DECL:
3378 gcc_unreachable ();
3379 bounds = chkp_get_bound_for_parm (ptr_src);
3380 break;
3381
3382 case TARGET_MEM_REF:
3383 addr = chkp_build_addr_expr (ptr_src);
3384 bounds = chkp_build_bndldx (addr, ptr, iter);
3385 break;
3386
3387 case SSA_NAME:
3388 bounds = chkp_get_registered_bounds (ptr_src);
3389 if (!bounds)
3390 {
3391 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3392 gimple_stmt_iterator phi_iter;
3393
3394 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3395
3396 gcc_assert (bounds);
3397
3398 if (gimple_code (def_stmt) == GIMPLE_PHI)
3399 {
3400 unsigned i;
3401
3402 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
3403 {
3404 tree arg = gimple_phi_arg_def (def_stmt, i);
3405 tree arg_bnd;
3406 gimple phi_bnd;
3407
3408 arg_bnd = chkp_find_bounds (arg, NULL);
3409
3410 /* chkp_get_bounds_by_definition created new phi
3411 statement and phi_iter points to it.
3412
3413 Previous call to chkp_find_bounds could create
3414 new basic block and therefore change phi statement
3415 phi_iter points to. */
3416 phi_bnd = gsi_stmt (phi_iter);
3417
3418 add_phi_arg (phi_bnd, arg_bnd,
3419 gimple_phi_arg_edge (def_stmt, i),
3420 UNKNOWN_LOCATION);
3421 }
3422
3423 /* If all bound phi nodes have their arg computed
3424 then we may finish its computation. See
3425 chkp_finish_incomplete_bounds for more details. */
3426 if (chkp_may_finish_incomplete_bounds ())
3427 chkp_finish_incomplete_bounds ();
3428 }
3429
3430 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3431 || chkp_incomplete_bounds (bounds));
3432 }
3433 break;
3434
3435 case ADDR_EXPR:
3436 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3437 break;
3438
3439 case INTEGER_CST:
3440 if (integer_zerop (ptr_src))
3441 bounds = chkp_get_none_bounds ();
3442 else
3443 bounds = chkp_get_invalid_op_bounds ();
3444 break;
3445
3446 default:
3447 if (dump_file && (dump_flags & TDF_DETAILS))
3448 {
3449 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3450 get_tree_code_name (TREE_CODE (ptr_src)));
3451 print_node (dump_file, "", ptr_src, 0);
3452 }
3453 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3454 get_tree_code_name (TREE_CODE (ptr_src)));
3455 }
3456
3457 if (!bounds)
3458 {
3459 if (dump_file && (dump_flags & TDF_DETAILS))
3460 {
3461 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3462 print_node (dump_file, "", ptr_src, 0);
3463 }
3464 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3465 }
3466
3467 return bounds;
3468 }
3469
/* Normal case for bounds search without forced narrowing:
   find (or build) bounds for pointer PTR, inserting any required
   statements at ITER.  Thin wrapper over chkp_find_bounds_1 with no
   separate load source.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3476
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  Thin wrapper over
   chkp_find_bounds_1 passing the load source explicitly.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3484
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.

   Recurses structurally: bounded (pointer) types are handled directly,
   records/unions recurse per field, arrays recurse per element.
   CONSTRUCTOR nodes on the RHS are walked element-wise so only
   explicitly initialized pointers are visited.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* A pointer itself: hand the pair straight to the handler.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Walk only the fields present in the initializer.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain aggregate copy: recurse into every pointer-carrying
	   field of the type.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* [lo .. hi] = val: expand the range element-wise.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit index resets CUR; otherwise elements are
		     assigned consecutively.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3583
3584 /* Add code to copy bounds for assignment of RHS to LHS.
3585 ARG is an iterator pointing ne code position. */
3586 static void
3587 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3588 {
3589 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3590 tree bounds = chkp_find_bounds (rhs, iter);
3591 tree addr = chkp_build_addr_expr(lhs);
3592
3593 chkp_build_bndstx (addr, rhs, bounds, iter);
3594 }
3595
/* Emit static bound initializers and size vars.
   Called at end of compilation: builds 'P'-priority constructors
   initializing bounds for statically initialized pointers, and
   'B'-priority constructors for static bounds variables, then frees
   the file-scope bound maps.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  /* Do not emit anything for erroneous compilations.  */
  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush a full statement list into its own constructor to keep
	   each generated ctor reasonably sized.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush remaining pointer-initialization statements.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* Static bounds vars are always initialized with the address
	   of the object they describe.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  /* Bounds ('B') ctors run before pointer-initialization ('P') ctors
     (lower priority value).  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release file-scope maps; compilation is finished.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3663
3664 /* An instrumentation function which is called for each statement
3665 having memory access we want to instrument. It inserts check
3666 code and bounds copy code.
3667
3668 ITER points to statement to instrument.
3669
3670 NODE holds memory access in statement to check.
3671
3672 LOC holds the location information for statement.
3673
3674 DIRFLAGS determines whether access is read or write.
3675
3676 ACCESS_OFFS should be added to address used in NODE
3677 before check.
3678
3679 ACCESS_SIZE holds size of checked access.
3680
3681 SAFE indicates if NODE access is safe and should not be
3682 checked. */
3683 static void
3684 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3685 location_t loc, tree dirflag,
3686 tree access_offs, tree access_size,
3687 bool safe)
3688 {
3689 tree node_type = TREE_TYPE (node);
3690 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3691 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3692 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3693 tree ptr = NULL_TREE; /* a pointer used for dereference */
3694 tree bounds = NULL_TREE;
3695
3696 /* We do not need instrumentation for clobbers. */
3697 if (dirflag == integer_one_node
3698 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3699 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3700 return;
3701
3702 switch (TREE_CODE (node))
3703 {
3704 case ARRAY_REF:
3705 case COMPONENT_REF:
3706 {
3707 bool bitfield;
3708 tree elt;
3709
3710 if (safe)
3711 {
3712 /* We are not going to generate any checks, so do not
3713 generate bounds as well. */
3714 addr_first = chkp_build_addr_expr (node);
3715 break;
3716 }
3717
3718 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3719 &bitfield, &bounds, iter, false);
3720
3721 /* Break if there is no dereference and operation is safe. */
3722
3723 if (bitfield)
3724 {
3725 tree field = TREE_OPERAND (node, 1);
3726
3727 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3728 size = DECL_SIZE_UNIT (field);
3729
3730 if (elt)
3731 elt = chkp_build_addr_expr (elt);
3732 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3733 addr_first = fold_build_pointer_plus_loc (loc,
3734 addr_first,
3735 byte_position (field));
3736 }
3737 else
3738 addr_first = chkp_build_addr_expr (node);
3739 }
3740 break;
3741
3742 case INDIRECT_REF:
3743 ptr = TREE_OPERAND (node, 0);
3744 addr_first = ptr;
3745 break;
3746
3747 case MEM_REF:
3748 ptr = TREE_OPERAND (node, 0);
3749 addr_first = chkp_build_addr_expr (node);
3750 break;
3751
3752 case TARGET_MEM_REF:
3753 ptr = TMR_BASE (node);
3754 addr_first = chkp_build_addr_expr (node);
3755 break;
3756
3757 case ARRAY_RANGE_REF:
3758 printf("ARRAY_RANGE_REF\n");
3759 debug_gimple_stmt(gsi_stmt(*iter));
3760 debug_tree(node);
3761 gcc_unreachable ();
3762 break;
3763
3764 case BIT_FIELD_REF:
3765 {
3766 tree offs, rem, bpu;
3767
3768 gcc_assert (!access_offs);
3769 gcc_assert (!access_size);
3770
3771 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3772 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3773 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3774 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3775
3776 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3777 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3778 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3779 size = fold_convert (size_type_node, size);
3780
3781 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3782 dirflag, offs, size, safe);
3783 return;
3784 }
3785 break;
3786
3787 case VAR_DECL:
3788 case RESULT_DECL:
3789 case PARM_DECL:
3790 if (dirflag != integer_one_node
3791 || DECL_REGISTER (node))
3792 return;
3793
3794 safe = true;
3795 addr_first = chkp_build_addr_expr (node);
3796 break;
3797
3798 default:
3799 return;
3800 }
3801
3802 /* If addr_last was not computed then use (addr_first + size - 1)
3803 expression to compute it. */
3804 if (!addr_last)
3805 {
3806 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3807 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3808 }
3809
3810 /* Shift both first_addr and last_addr by access_offs if specified. */
3811 if (access_offs)
3812 {
3813 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3814 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3815 }
3816
3817 /* Generate bndcl/bndcu checks if memory access is not safe. */
3818 if (!safe)
3819 {
3820 gimple_stmt_iterator stmt_iter = *iter;
3821
3822 if (!bounds)
3823 bounds = chkp_find_bounds (ptr, iter);
3824
3825 chkp_check_mem_access (addr_first, addr_last, bounds,
3826 stmt_iter, loc, dirflag);
3827 }
3828
3829 /* We need to store bounds in case pointer is stored. */
3830 if (dirflag == integer_one_node
3831 && chkp_type_has_pointer (node_type)
3832 && flag_chkp_store_bounds)
3833 {
3834 gimple stmt = gsi_stmt (*iter);
3835 tree rhs1 = gimple_assign_rhs1 (stmt);
3836 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3837
3838 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3839 chkp_walk_pointer_assignments (node, rhs1, iter,
3840 chkp_copy_bounds_for_elem);
3841 else
3842 {
3843 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3844 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3845 }
3846 }
3847 }
3848
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.
   Also creates call graph edges for every bndldx/bndstx call
   generated by the walk.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* The walk left ITER past the last inserted statement; scan backwards
     until we reach ASSIGN again.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only checker runtime calls can have been inserted here.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  /* Inherit profile information from the inlined edge.  */
	  new_edge = edge->caller->create_edge (callee, stmt, edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
3885
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws: statements inserted after a block-ending statement
   are moved onto the fallthru edge.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements following a block-ending statement are misplaced.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
3943
3944 /* Walker callback for chkp_replace_function_pointers. Replaces
3945 function pointer in the specified operand with pointer to the
3946 instrumented function version. */
3947 static tree
3948 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
3949 void *data ATTRIBUTE_UNUSED)
3950 {
3951 if (TREE_CODE (*op) == FUNCTION_DECL
3952 && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
3953 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
3954 /* For builtins we replace pointers only for selected
3955 function and functions having definitions. */
3956 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
3957 && (chkp_instrument_normal_builtin (*op)
3958 || gimple_has_body_p (*op)))))
3959 {
3960 struct cgraph_node *node = cgraph_node::get_create (*op);
3961 struct cgraph_node *clone = NULL;
3962
3963 if (!node->instrumentation_clone)
3964 clone = chkp_maybe_create_clone (*op);
3965
3966 if (clone)
3967 *op = clone->decl;
3968 *walk_subtrees = 0;
3969 }
3970
3971 return NULL;
3972 }
3973
3974 /* This function searches for function pointers in statement
3975 pointed by GSI and replaces them with pointers to instrumented
3976 function versions. */
3977 static void
3978 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
3979 {
3980 gimple stmt = gsi_stmt (*gsi);
3981 /* For calls we want to walk call args only. */
3982 if (gimple_code (stmt) == GIMPLE_CALL)
3983 {
3984 unsigned i;
3985 for (i = 0; i < gimple_call_num_args (stmt); i++)
3986 walk_tree (gimple_call_arg_ptr (stmt, i),
3987 chkp_replace_function_pointer, NULL, NULL);
3988 }
3989 else
3990 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
3991 }
3992
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Checker static constructors are generated code; accesses in them
     are known safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  /* NEXT is cached before instrumentation because inserting checks may
     split BB.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* Check the store (lhs) and every loaded operand.  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      if (gimple_return_retval (s) != NULL_TREE)
		{
		  chkp_process_stmt (&i, gimple_return_retval (s),
				     gimple_location (s),
				     integer_zero_node,
				     NULL_TREE, NULL_TREE, safe);

		  /* Additionally we need to add bounds
		     to return statement.  */
		  chkp_add_bounds_to_ret_stmt (&i);
		}
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      /* NOTE(review): uses TREE_CHAIN where the loop header and
		 the branch below use DECL_CHAIN — presumably equivalent
		 for decls; confirm.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* Aggregate param: store bounds for every contained
		 pointer slot; each slot consumes one bounds arg.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4125
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.  */

static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree fndecl;
	  enum built_in_function fcode;

	  /* Find builtins returning first arg and replace
	     them with assignments.  */
	  /* Note the assignments inside the condition: FNDECL and FCODE
	     are set as side effects and must be non-zero for the chain
	     to continue.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && (fndecl = gimple_call_fndecl (stmt))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && (fcode = DECL_FUNCTION_CODE (fndecl))
	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
	    {
	      /* These builtins all return their first argument
		 unchanged, so the call can become a plain copy.  */
	      tree res = gimple_call_arg (stmt, 0);
	      update_call_from_tree (&gsi, res);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	    }
	}
    }
}
4163
/* Initialize pass.  Resets all per-function state used by the
   checker: statement marks, bound maps, cached bounds/vars, and
   dominance info.  Must run before chkp_instrument_function;
   chkp_fini releases what is allocated here.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks possibly left from a previous run.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* Fresh per-function maps; the delete calls free maps surviving from
     a previous function (delete of NULL is a no-op).  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset lazily-created helpers and cached bound values.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4209
4210 /* Finalize instrumentation pass. */
4211 static void
4212 chkp_fini (void)
4213 {
4214 in_chkp_pass = false;
4215
4216 delete chkp_invalid_bounds;
4217 delete chkp_completed_bounds_set;
4218 delete chkp_reg_addr_bounds;
4219 delete chkp_incomplete_bounds_map;
4220
4221 free_dominance_info (CDI_DOMINATORS);
4222 free_dominance_info (CDI_POST_DOMINATORS);
4223
4224 bitmap_obstack_release (NULL);
4225 }
4226
/* Main instrumentation pass function.  Phases run in a fixed order:
   init state, instrument memory accesses/calls/returns, simplify
   redundant checker builtins, mark the function instrumented,
   repair the CFG, then release pass state.  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  /* Instrumentation may have left statements after block-ending
     statements; move them onto fallthru edges.  */
  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4245
4246 /* Instrumentation pass gate. */
4247 static bool
4248 chkp_gate (void)
4249 {
4250 return cgraph_node::get (cfun->decl)->instrumentation_clone
4251 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
4252 }
4253
namespace {

/* Pass descriptor for the chkp GIMPLE pass.  Requires SSA and CFG;
   IL is re-verified and SSA updated after the pass.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass-manager wrapper delegating gate/execute to the static
   chkp_gate/chkp_execute functions above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4296
/* Factory for the chkp pass; called by the pass manager.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4302
4303 #include "gt-tree-chkp.h"