IA MCU psABI support: changes to libraries
[gcc.git] / gcc / tree-chkp.c
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "options.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stor-layout.h"
30 #include "varasm.h"
31 #include "target.h"
32 #include "tree-iterator.h"
33 #include "tree-cfg.h"
34 #include "langhooks.h"
35 #include "tree-pass.h"
36 #include "diagnostic.h"
37 #include "cfgloop.h"
38 #include "stringpool.h"
39 #include "tree-ssa-alias.h"
40 #include "tree-ssanames.h"
41 #include "tree-ssa-operands.h"
42 #include "tree-ssa-address.h"
43 #include "tree-ssa.h"
44 #include "predict.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "basic-block.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "gimple-expr.h"
50 #include "gimple.h"
51 #include "tree-phinodes.h"
52 #include "gimple-ssa.h"
53 #include "ssa-iterators.h"
54 #include "gimple-pretty-print.h"
55 #include "gimple-iterator.h"
56 #include "gimplify.h"
57 #include "gimplify-me.h"
58 #include "print-tree.h"
59 #include "tm.h"
60 #include "hard-reg-set.h"
61 #include "function.h"
62 #include "rtl.h"
63 #include "flags.h"
64 #include "insn-config.h"
65 #include "expmed.h"
66 #include "dojump.h"
67 #include "explow.h"
68 #include "calls.h"
69 #include "emit-rtl.h"
70 #include "stmt.h"
71 #include "expr.h"
72 #include "tree-ssa-propagate.h"
73 #include "gimple-fold.h"
74 #include "tree-chkp.h"
75 #include "gimple-walk.h"
76 #include "rtl.h" /* For MEM_P, assign_temp. */
77 #include "tree-dfa.h"
78 #include "lto-streamer.h"
79 #include "cgraph.h"
80 #include "ipa-chkp.h"
81 #include "params.h"
82
83 /* Pointer Bounds Checker instruments code with memory checks to find
84 out-of-bounds memory accesses. Checks are performed by computing
85 bounds for each pointer and then comparing address of accessed
86 memory before pointer dereferencing.
87
88 1. Function clones.
89
90 See ipa-chkp.c.
91
92 2. Instrumentation.
93
94 There are few things to instrument:
95
96 a) Memory accesses - add checker calls to check address of accessed memory
97 against bounds of dereferenced pointer. Obviously safe memory
98 accesses like static variable access does not have to be instrumented
99 with checks.
100
101 Example:
102
103 val_2 = *p_1;
104
105 with 4 bytes access is transformed into:
106
107 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
108 D.1_4 = p_1 + 3;
109 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
110 val_2 = *p_1;
111
112 where __bound_tmp.1_3 are bounds computed for pointer p_1,
113 __builtin___chkp_bndcl is a lower bound check and
114 __builtin___chkp_bndcu is an upper bound check.
115
116 b) Pointer stores.
117
118 When pointer is stored in memory we need to store its bounds. To
119 achieve compatibility of instrumented code with regular codes
120 we have to keep data layout and store bounds in special bound tables
121 via special checker call. Implementation of bounds table may vary for
122 different platforms. It has to associate pointer value and its
123 location (it is required because we may have two equal pointers
124 with different bounds stored in different places) with bounds.
125 Another checker builtin allows to get bounds for specified pointer
126 loaded from specified location.
127
128 Example:
129
130 buf1[i_1] = &buf2;
131
132 is transformed into:
133
134 buf1[i_1] = &buf2;
135 D.1_2 = &buf1[i_1];
136 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
137
138 where __bound_tmp.1_2 are bounds of &buf2.
139
140 c) Static initialization.
141
142 The special case of pointer store is static pointer initialization.
143 Bounds initialization is performed in a few steps:
144 - register all static initializations in front-end using
145 chkp_register_var_initializer
146 - when file compilation finishes we create functions with special
147 attribute 'chkp ctor' and put explicit initialization code
148 (assignments) for all statically initialized pointers.
149 - when checker constructor is compiled checker pass adds required
150 bounds initialization for all statically initialized pointers
151 - since we do not actually need excess pointers initialization
152 in checker constructor we remove such assignments from them
153
154 d) Calls.
155
156 For each call in the code we add additional arguments to pass
157 bounds for pointer arguments. We determine type of call arguments
158 using arguments list from function declaration; if function
159 declaration is not available we use function type; otherwise
160 (e.g. for unnamed arguments) we use type of passed value. Function
161 declaration/type is replaced with the instrumented one.
162
163 Example:
164
165 val_1 = foo (&buf1, &buf2, &buf1, 0);
166
167 is translated into:
168
169 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
170 &buf1, __bound_tmp.1_2, 0);
171
172 e) Returns.
173
174 If function returns a pointer value we have to return bounds also.
175 A new operand was added for return statement to hold returned bounds.
176
177 Example:
178
179 return &_buf1;
180
181 is transformed into
182
183 return &_buf1, __bound_tmp.1_1;
184
185 3. Bounds computation.
186
187 Compiler is fully responsible for computing bounds to be used for each
188 memory access. The first step for bounds computation is to find the
189      origin of pointer dereferenced for memory access.  Based on pointer
190 origin we define a way to compute its bounds. There are just few
191 possible cases:
192
193 a) Pointer is returned by call.
194
195 In this case we use corresponding checker builtin method to obtain returned
196 bounds.
197
198 Example:
199
200 buf_1 = malloc (size_2);
201 foo (buf_1);
202
203 is translated into:
204
205 buf_1 = malloc (size_2);
206 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
207 foo (buf_1, __bound_tmp.1_3);
208
209 b) Pointer is an address of an object.
210
211 In this case compiler tries to compute objects size and create corresponding
212 bounds. If object has incomplete type then special checker builtin is used to
213 obtain its size at runtime.
214
215 Example:
216
217 foo ()
218 {
219 <unnamed type> __bound_tmp.3;
220 static int buf[100];
221
222 <bb 3>:
223 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
224
225 <bb 2>:
226 return &buf, __bound_tmp.3_2;
227 }
228
229 Example:
230
231 Address of an object 'extern int buf[]' with incomplete type is
232 returned.
233
234 foo ()
235 {
236 <unnamed type> __bound_tmp.4;
237 long unsigned int __size_tmp.3;
238
239 <bb 3>:
240 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
241 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
242
243 <bb 2>:
244 return &buf, __bound_tmp.4_3;
245 }
246
247 c) Pointer is the result of object narrowing.
248
249 It happens when we use pointer to an object to compute pointer to a part
250 of an object. E.g. we take pointer to a field of a structure. In this
251 case we perform bounds intersection using bounds of original object and
252 bounds of object's part (which are computed basing on its type).
253
254 There may be some debatable questions about when narrowing should occur
255 and when it should not. To avoid false bound violations in correct
256 programs we do not perform narrowing when address of an array element is
257 obtained (it has address of the whole array) and when address of the first
258 structure field is obtained (because it is guaranteed to be equal to
259 address of the whole structure and it is legal to cast it back to structure).
260
261 Default narrowing behavior may be changed using compiler flags.
262
263 Example:
264
265 In this example address of the second structure field is returned.
266
267 foo (struct A * p, __bounds_type __bounds_of_p)
268 {
269 <unnamed type> __bound_tmp.3;
270 int * _2;
271 int * _5;
272
273 <bb 2>:
274 _5 = &p_1(D)->second_field;
275 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
276 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
277 __bounds_of_p_3(D));
278 _2 = &p_1(D)->second_field;
279 return _2, __bound_tmp.3_8;
280 }
281
282 Example:
283
284 In this example address of the first field of array element is returned.
285
286 foo (struct A * p, __bounds_type __bounds_of_p, int i)
287 {
288 long unsigned int _3;
289 long unsigned int _4;
290 struct A * _6;
291 int * _7;
292
293 <bb 2>:
294 _3 = (long unsigned int) i_1(D);
295 _4 = _3 * 8;
296 _6 = p_5(D) + _4;
297 _7 = &_6->first_field;
298 return _7, __bounds_of_p_2(D);
299 }
300
301
302 d) Pointer is the result of pointer arithmetic or type cast.
303
304 In this case bounds of the base pointer are used. In case of binary
305 operation producing a pointer we are analyzing data flow further
306 looking for operand's bounds. One operand is considered as a base
307 if it has some valid bounds. If we fall into a case when none of
308 operands (or both of them) has valid bounds, a default bounds value
309 is used.
310
311 Trying to find out bounds for binary operations we may fall into
312 cyclic dependencies for pointers. To avoid infinite recursion all
313 walked phi nodes instantly obtain corresponding bounds but created
314 bounds are marked as incomplete. It helps us to stop DF walk during
315 bounds search.
316
317 When we reach pointer source, some args of incomplete bounds phi obtain
318 valid bounds and those values are propagated further through phi nodes.
319 If no valid bounds were found for phi node then we mark its result as
320 invalid bounds. Process stops when all incomplete bounds become either
321 valid or invalid and we are able to choose a pointer base.
322
323 e) Pointer is loaded from the memory.
324
325 In this case we just need to load bounds from the bounds table.
326
327 Example:
328
329 foo ()
330 {
331 <unnamed type> __bound_tmp.3;
332 static int * buf;
333 int * _2;
334
335 <bb 2>:
336 _2 = buf;
337 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
338 return _2, __bound_tmp.3_4;
339 }
340
341 */
342
343 typedef void (*assign_handler)(tree, tree, void *);
344
345 static tree chkp_get_zero_bounds ();
346 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
347 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
348 gimple_stmt_iterator *iter);
349 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
350 tree *elt, bool *safe,
351 bool *bitfield,
352 tree *bounds,
353 gimple_stmt_iterator *iter,
354 bool innermost_bounds);
355
356 #define chkp_bndldx_fndecl \
357 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
358 #define chkp_bndstx_fndecl \
359 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
360 #define chkp_checkl_fndecl \
361 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
362 #define chkp_checku_fndecl \
363 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
364 #define chkp_bndmk_fndecl \
365 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
366 #define chkp_ret_bnd_fndecl \
367 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
368 #define chkp_intersect_fndecl \
369 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
370 #define chkp_narrow_bounds_fndecl \
371 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
372 #define chkp_sizeof_fndecl \
373 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
374 #define chkp_extract_lower_fndecl \
375 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
376 #define chkp_extract_upper_fndecl \
377 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
378
379 static GTY (()) tree chkp_uintptr_type;
380
381 static GTY (()) tree chkp_zero_bounds_var;
382 static GTY (()) tree chkp_none_bounds_var;
383
384 static GTY (()) basic_block entry_block;
385 static GTY (()) tree zero_bounds;
386 static GTY (()) tree none_bounds;
387 static GTY (()) tree incomplete_bounds;
388 static GTY (()) tree tmp_var;
389 static GTY (()) tree size_tmp_var;
390 static GTY (()) bitmap chkp_abnormal_copies;
391
392 struct hash_set<tree> *chkp_invalid_bounds;
393 struct hash_set<tree> *chkp_completed_bounds_set;
394 struct hash_map<tree, tree> *chkp_reg_bounds;
395 struct hash_map<tree, tree> *chkp_bound_vars;
396 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
397 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
398 struct hash_map<tree, tree> *chkp_bounds_map;
399 struct hash_map<tree, tree> *chkp_static_var_bounds;
400
401 static bool in_chkp_pass;
402
403 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
404 #define CHKP_SIZE_TMP_NAME "__size_tmp"
405 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
406 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
407 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
408 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
409 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
410
411 /* Static checker constructors may become very large and their
412 compilation with optimization may take too much time.
413 Therefore we put a limit to number of statements in one
414 constructor. Tests with 100 000 statically initialized
415 pointers showed following compilation times on Sandy Bridge
416 server (used -O2):
417 limit 100 => ~18 sec.
418 limit 300 => ~22 sec.
419 limit 1000 => ~30 sec.
420 limit 3000 => ~49 sec.
421 limit 5000 => ~55 sec.
422 limit 10000 => ~76 sec.
423 limit 100000 => ~532 sec. */
424 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
425
426 struct chkp_ctor_stmt_list
427 {
428 tree stmts;
429 int avail;
430 };
431
432 /* Return 1 if function FNDECL is instrumented by Pointer
433 Bounds Checker. */
434 bool
435 chkp_function_instrumented_p (tree fndecl)
436 {
437 return fndecl
438 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
439 }
440
441 /* Mark function FNDECL as instrumented. */
442 void
443 chkp_function_mark_instrumented (tree fndecl)
444 {
445 if (chkp_function_instrumented_p (fndecl))
446 return;
447
448 DECL_ATTRIBUTES (fndecl)
449 = tree_cons (get_identifier ("chkp instrumented"), NULL,
450 DECL_ATTRIBUTES (fndecl));
451 }
452
453 /* Return true when STMT is builtin call to instrumentation function
454 corresponding to CODE. */
455
456 bool
457 chkp_gimple_call_builtin_p (gimple call,
458 enum built_in_function code)
459 {
460 tree fndecl;
461 if (is_gimple_call (call)
462 && (fndecl = targetm.builtin_chkp_function (code))
463 && gimple_call_fndecl (call) == fndecl)
464 return true;
465 return false;
466 }
467
/* Emit code to build zero bounds and return RTL holding
   the result.  */
rtx
chkp_expand_zero_bounds ()
{
  tree zero_bnd;

  /* With static const bounds the zero-bounds value lives in a
     dedicated variable; otherwise build an explicit
     bndmk (0, 0) call.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  return expand_normal (zero_bnd);
}
482
/* Emit code to store zero bounds for PTR located at MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Zero-bounds value: static const variable when available,
     otherwise an explicit bndmk (0, 0) call.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary holding the bounds value to be stored.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* Address of MEM identifies the slot in the bounds table.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* Assign zero bounds into the temporary, then expand the
     bndstx store.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
503
/* Build retbnd call for returned value RETVAL.

   If BNDVAL is not NULL then result is stored
   in it.  Otherwise a temporary is created to
   hold returned value.

   GSI points to a position for a retbnd call
   and is set to created stmt.

   Obtained bounds are returned.  */
tree
chkp_insert_retbnd_call (tree bndval, tree retval,
			 gimple_stmt_iterator *gsi)
{
  gimple call;

  if (!bndval)
    bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");

  /* bndval = __builtin___chkp_bndret (retval); inserted right
     after GSI.  */
  call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  gimple_call_set_lhs (call, bndval);
  gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);

  return bndval;
}
532
/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  /* Collect positions of all bounds arguments.  */
  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  /* Copy the call skipping those positions; when there are no
     bounds args the original statement is kept, only its
     with-bounds flag is cleared.  */
  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}
558
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Instrumented callee but the call carries no bounds:
     redirect to the original (non-instrumented) node.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  /* Non-instrumented callee but the call carries bounds:
     redirect to the instrumented version when one exists.
     Checker builtins bndcl/bndcu/bndstx legitimately take
     bounds arguments and are left untouched.  */
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
597
598 /* Mark statement S to not be instrumented. */
599 static void
600 chkp_mark_stmt (gimple s)
601 {
602 gimple_set_plf (s, GF_PLF_1, true);
603 }
604
605 /* Mark statement S to be instrumented. */
606 static void
607 chkp_unmark_stmt (gimple s)
608 {
609 gimple_set_plf (s, GF_PLF_1, false);
610 }
611
612 /* Return 1 if statement S should not be instrumented. */
613 static bool
614 chkp_marked_stmt_p (gimple s)
615 {
616 return gimple_plf (s, GF_PLF_1);
617 }
618
619 /* Get var to be used for bound temps. */
620 static tree
621 chkp_get_tmp_var (void)
622 {
623 if (!tmp_var)
624 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
625
626 return tmp_var;
627 }
628
629 /* Get SSA_NAME to be used as temp. */
630 static tree
631 chkp_get_tmp_reg (gimple stmt)
632 {
633 if (in_chkp_pass)
634 return make_ssa_name (chkp_get_tmp_var (), stmt);
635
636 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
637 CHKP_BOUND_TMP_NAME);
638 }
639
640 /* Get var to be used for size temps. */
641 static tree
642 chkp_get_size_tmp_var (void)
643 {
644 if (!size_tmp_var)
645 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
646
647 return size_tmp_var;
648 }
649
650 /* Register bounds BND for address of OBJ. */
651 static void
652 chkp_register_addr_bounds (tree obj, tree bnd)
653 {
654 if (bnd == incomplete_bounds)
655 return;
656
657 chkp_reg_addr_bounds->put (obj, bnd);
658
659 if (dump_file && (dump_flags & TDF_DETAILS))
660 {
661 fprintf (dump_file, "Regsitered bound ");
662 print_generic_expr (dump_file, bnd, 0);
663 fprintf (dump_file, " for address of ");
664 print_generic_expr (dump_file, obj, 0);
665 fprintf (dump_file, "\n");
666 }
667 }
668
669 /* Return bounds registered for address of OBJ. */
670 static tree
671 chkp_get_registered_addr_bounds (tree obj)
672 {
673 tree *slot = chkp_reg_addr_bounds->get (obj);
674 return slot ? *slot : NULL_TREE;
675 }
676
677 /* Mark BOUNDS as completed. */
678 static void
679 chkp_mark_completed_bounds (tree bounds)
680 {
681 chkp_completed_bounds_set->add (bounds);
682
683 if (dump_file && (dump_flags & TDF_DETAILS))
684 {
685 fprintf (dump_file, "Marked bounds ");
686 print_generic_expr (dump_file, bounds, 0);
687 fprintf (dump_file, " as completed\n");
688 }
689 }
690
691 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
692 static bool
693 chkp_completed_bounds (tree bounds)
694 {
695 return chkp_completed_bounds_set->contains (bounds);
696 }
697
/* Clear completed bound marks by replacing the set with a
   fresh empty one.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
705
706 /* Mark BOUNDS associated with PTR as incomplete. */
707 static void
708 chkp_register_incomplete_bounds (tree bounds, tree ptr)
709 {
710 chkp_incomplete_bounds_map->put (bounds, ptr);
711
712 if (dump_file && (dump_flags & TDF_DETAILS))
713 {
714 fprintf (dump_file, "Regsitered incomplete bounds ");
715 print_generic_expr (dump_file, bounds, 0);
716 fprintf (dump_file, " for ");
717 print_generic_expr (dump_file, ptr, 0);
718 fprintf (dump_file, "\n");
719 }
720 }
721
722 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
723 static bool
724 chkp_incomplete_bounds (tree bounds)
725 {
726 if (bounds == incomplete_bounds)
727 return true;
728
729 if (chkp_completed_bounds (bounds))
730 return false;
731
732 return chkp_incomplete_bounds_map->get (bounds) != NULL;
733 }
734
/* Clear incomplete bound marks by replacing the map with a
   fresh empty one.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
742
743 /* Build and return bndmk call which creates bounds for structure
744 pointed by PTR. Structure should have complete type. */
745 tree
746 chkp_make_bounds_for_struct_addr (tree ptr)
747 {
748 tree type = TREE_TYPE (ptr);
749 tree size;
750
751 gcc_assert (POINTER_TYPE_P (type));
752
753 size = TYPE_SIZE (TREE_TYPE (type));
754
755 gcc_assert (size);
756
757 return build_call_nary (pointer_bounds_type_node,
758 build_fold_addr_expr (chkp_bndmk_fndecl),
759 2, ptr, size);
760 }
761
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  /* Incomplete bounds are expected to be defined by phi nodes.  */
  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  Returning false
	     stops the hash_map traversal.  */
	  return false;
	}
    }

  /* Returning true continues the traversal.  */
  return true;
}
792
793 /* Return 1 if all phi nodes created for bounds have their
794 arguments computed. */
795 static bool
796 chkp_may_finish_incomplete_bounds (void)
797 {
798 bool res = true;
799
800 chkp_incomplete_bounds_map
801 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
802
803 return res;
804 }
805
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  /* SLOT holds the pointer SSA name these bounds were created for.  */
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  /* Both bounds and pointer are defined by phi nodes whose
     arguments correspond edge-by-edge.  */
  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      /* For each incoming pointer value look up its bounds and
	 attach them on the matching incoming edge.  */
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
835
836 /* Mark BOUNDS as invalid. */
837 static void
838 chkp_mark_invalid_bounds (tree bounds)
839 {
840 chkp_invalid_bounds->add (bounds);
841
842 if (dump_file && (dump_flags & TDF_DETAILS))
843 {
844 fprintf (dump_file, "Marked bounds ");
845 print_generic_expr (dump_file, bounds, 0);
846 fprintf (dump_file, " as invalid\n");
847 }
848 }
849
850 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
851 static bool
852 chkp_valid_bounds (tree bounds)
853 {
854 if (bounds == zero_bounds || bounds == none_bounds)
855 return false;
856
857 return !chkp_invalid_bounds->contains (bounds);
858 }
859
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Bounds already marked completed need no further work.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* A single valid, non-incomplete argument is enough: mark
	 the phi result completed, recompute its args, and set RES
	 so the caller knows another pass may make progress.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  /* Keep traversing the map even when nothing was found here.  */
  return true;
}
898
899 /* Helper function for chkp_finish_incomplete_bounds.
900 Marks all incompleted bounds as invalid. */
901 bool
902 chkp_mark_invalid_bounds_walker (tree const &bounds,
903 tree *slot ATTRIBUTE_UNUSED,
904 void *res ATTRIBUTE_UNUSED)
905 {
906 if (!chkp_completed_bounds (bounds))
907 {
908 chkp_mark_invalid_bounds (bounds);
909 chkp_mark_completed_bounds (bounds);
910 }
911 return true;
912 }
913
914 /* When all bound phi nodes have all their args computed
915 we have enough info to find valid bounds. We iterate
916 through all incompleted bounds searching for valid
917 bounds. Found valid bounds are marked as completed
918 and all remaining incompleted bounds are recomputed.
919 Process continues until no new valid bounds may be
920 found. All remained incompleted bounds are marked as
921 invalid (i.e. have no valid source of bounds). */
922 static void
923 chkp_finish_incomplete_bounds (void)
924 {
925 bool found_valid;
926
927 while (found_valid)
928 {
929 found_valid = false;
930
931 chkp_incomplete_bounds_map->
932 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
933
934 if (found_valid)
935 chkp_incomplete_bounds_map->
936 traverse<void *, chkp_recompute_phi_bounds> (NULL);
937 }
938
939 chkp_incomplete_bounds_map->
940 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
941 chkp_incomplete_bounds_map->
942 traverse<void *, chkp_recompute_phi_bounds> (NULL);
943
944 chkp_erase_completed_bounds ();
945 chkp_erase_incomplete_bounds ();
946 }
947
948 /* Return 1 if type TYPE is a pointer type or a
949 structure having a pointer type as one of its fields.
950 Otherwise return 0. */
951 bool
952 chkp_type_has_pointer (const_tree type)
953 {
954 bool res = false;
955
956 if (BOUNDED_TYPE_P (type))
957 res = true;
958 else if (RECORD_OR_UNION_TYPE_P (type))
959 {
960 tree field;
961
962 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
963 if (TREE_CODE (field) == FIELD_DECL)
964 res = res || chkp_type_has_pointer (TREE_TYPE (field));
965 }
966 else if (TREE_CODE (type) == ARRAY_TYPE)
967 res = chkp_type_has_pointer (TREE_TYPE (type));
968
969 return res;
970 }
971
/* Return the number of bounds slots required for values of type
   TYPE: 1 for a pointer type, the number of slots found by
   chkp_find_bound_slots for a record or union, 0 otherwise
   (including a NULL TYPE).  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* chkp_find_bound_slots sets one bit per bounds slot;
	 the population count is the answer.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
995
996 /* Get bounds associated with NODE via
997 chkp_set_bounds call. */
998 tree
999 chkp_get_bounds (tree node)
1000 {
1001 tree *slot;
1002
1003 if (!chkp_bounds_map)
1004 return NULL_TREE;
1005
1006 slot = chkp_bounds_map->get (node);
1007 return slot ? *slot : NULL_TREE;
1008 }
1009
1010 /* Associate bounds VAL with NODE. */
1011 void
1012 chkp_set_bounds (tree node, tree val)
1013 {
1014 if (!chkp_bounds_map)
1015 chkp_bounds_map = new hash_map<tree, tree>;
1016
1017 chkp_bounds_map->put (node, val);
1018 }
1019
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into the
   bounds initialization list then 1 is returned.  Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do with the checker disabled or an erroneous
     initializer.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need
     bounds initialization; the varpool node is flagged so the
     checker constructor picks it up later.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
1043
/* Helper function for chkp_finish_file.

   Add a new modification statement (RHS is assigned to LHS)
   into the list of static initializer statements (passed in ARG)
   and decrement the list's statement budget.  NOTE: flushing the
   list into a checker constructor when the budget runs out is
   done by the caller, not here.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert an explicit conversion when RHS's type does not
     trivially convert to LHS's.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}
1066
1067 /* Build and return ADDR_EXPR for specified object OBJ. */
1068 static tree
1069 chkp_build_addr_expr (tree obj)
1070 {
1071 return TREE_CODE (obj) == TARGET_MEM_REF
1072 ? tree_mem_ref_addr (ptr_type_node, obj)
1073 : build_fold_addr_expr (obj);
1074 }
1075
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* Lower bound is the string's address; SIZE is
	 TREE_STRING_LENGTH minus one, i.e. the offset of the
	 last byte of the literal.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size: emit a call to the
	 checker sizeof builtin.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the dynamic size turns out to be zero, substitute
	     0 - LB as the size so the upper bound covers the rest
	     of the address space.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is LB plus the last-byte offset.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Statement budget exhausted: emit the accumulated statements
     as a static constructor and start a fresh list (see
     MAX_STMTS_IN_STATIC_CHKP_CTOR).  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1133
1134 /* Return entry block to be used for checker initilization code.
1135 Create new block if required. */
1136 static basic_block
1137 chkp_get_entry_block (void)
1138 {
1139 if (!entry_block)
1140 entry_block
1141 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1142
1143 return entry_block;
1144 }
1145
1146 /* Return a bounds var to be used for pointer var PTR_VAR. */
1147 static tree
1148 chkp_get_bounds_var (tree ptr_var)
1149 {
1150 tree bnd_var;
1151 tree *slot;
1152
1153 slot = chkp_bound_vars->get (ptr_var);
1154 if (slot)
1155 bnd_var = *slot;
1156 else
1157 {
1158 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1159 CHKP_BOUND_TMP_NAME);
1160 chkp_bound_vars->put (ptr_var, bnd_var);
1161 }
1162
1163 return bnd_var;
1164 }
1165
1166 /* If BND is an abnormal bounds copy, return a copied value.
1167 Otherwise return BND. */
1168 static tree
1169 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1170 {
1171 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1172 {
1173 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1174 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1175 bnd = gimple_assign_rhs1 (bnd_def);
1176 }
1177
1178 return bnd;
1179 }
1180
1181 /* Register bounds BND for object PTR in global bounds table.
1182 A copy of bounds may be created for abnormal ssa names.
1183 Returns bounds to use for PTR. */
1184 static tree
1185 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1186 {
1187 bool abnormal_ptr;
1188
1189 if (!chkp_reg_bounds)
1190 return bnd;
1191
1192 /* Do nothing if bounds are incomplete_bounds
1193 because it means bounds will be recomputed. */
1194 if (bnd == incomplete_bounds)
1195 return bnd;
1196
1197 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1198 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1199 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1200
1201 /* A single bounds value may be reused multiple times for
1202 different pointer values. It may cause coalescing issues
1203 for abnormal SSA names. To avoid it we create a bounds
1204 copy in case it is computed for abnormal SSA name.
1205
1206 We also cannot reuse such created copies for other pointers */
1207 if (abnormal_ptr
1208 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1209 {
1210 tree bnd_var = NULL_TREE;
1211
1212 if (abnormal_ptr)
1213 {
1214 if (SSA_NAME_VAR (ptr))
1215 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1216 }
1217 else
1218 bnd_var = chkp_get_tmp_var ();
1219
1220 /* For abnormal copies we may just find original
1221 bounds and use them. */
1222 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1223 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1224 /* For undefined values we usually use none bounds
1225 value but in case of abnormal edge it may cause
1226 coalescing failures. Use default definition of
1227 bounds variable instead to avoid it. */
1228 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1229 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1230 {
1231 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1232
1233 if (dump_file && (dump_flags & TDF_DETAILS))
1234 {
1235 fprintf (dump_file, "Using default def bounds ");
1236 print_generic_expr (dump_file, bnd, 0);
1237 fprintf (dump_file, " for abnormal default def SSA name ");
1238 print_generic_expr (dump_file, ptr, 0);
1239 fprintf (dump_file, "\n");
1240 }
1241 }
1242 else
1243 {
1244 tree copy;
1245 gimple def = SSA_NAME_DEF_STMT (ptr);
1246 gimple assign;
1247 gimple_stmt_iterator gsi;
1248
1249 if (bnd_var)
1250 copy = make_ssa_name (bnd_var);
1251 else
1252 copy = make_temp_ssa_name (pointer_bounds_type_node,
1253 NULL,
1254 CHKP_BOUND_TMP_NAME);
1255 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1256 assign = gimple_build_assign (copy, bnd);
1257
1258 if (dump_file && (dump_flags & TDF_DETAILS))
1259 {
1260 fprintf (dump_file, "Creating a copy of bounds ");
1261 print_generic_expr (dump_file, bnd, 0);
1262 fprintf (dump_file, " for abnormal SSA name ");
1263 print_generic_expr (dump_file, ptr, 0);
1264 fprintf (dump_file, "\n");
1265 }
1266
1267 if (gimple_code (def) == GIMPLE_NOP)
1268 {
1269 gsi = gsi_last_bb (chkp_get_entry_block ());
1270 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1271 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1272 else
1273 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1274 }
1275 else
1276 {
1277 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1278 /* Sometimes (e.g. when we load a pointer from a
1279 memory) bounds are produced later than a pointer.
1280 We need to insert bounds copy appropriately. */
1281 if (gimple_code (bnd_def) != GIMPLE_NOP
1282 && stmt_dominates_stmt_p (def, bnd_def))
1283 gsi = gsi_for_stmt (bnd_def);
1284 else
1285 gsi = gsi_for_stmt (def);
1286 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1287 }
1288
1289 bnd = copy;
1290 }
1291
1292 if (abnormal_ptr)
1293 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1294 }
1295
1296 chkp_reg_bounds->put (ptr, bnd);
1297
1298 if (dump_file && (dump_flags & TDF_DETAILS))
1299 {
1300 fprintf (dump_file, "Regsitered bound ");
1301 print_generic_expr (dump_file, bnd, 0);
1302 fprintf (dump_file, " for pointer ");
1303 print_generic_expr (dump_file, ptr, 0);
1304 fprintf (dump_file, "\n");
1305 }
1306
1307 return bnd;
1308 }
1309
1310 /* Get bounds registered for object PTR in global bounds table. */
1311 static tree
1312 chkp_get_registered_bounds (tree ptr)
1313 {
1314 tree *slot;
1315
1316 if (!chkp_reg_bounds)
1317 return NULL_TREE;
1318
1319 slot = chkp_reg_bounds->get (ptr);
1320 return slot ? *slot : NULL_TREE;
1321 }
1322
1323 /* Add bound retvals to return statement pointed by GSI. */
1324
1325 static void
1326 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1327 {
1328 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1329 tree retval = gimple_return_retval (ret);
1330 tree ret_decl = DECL_RESULT (cfun->decl);
1331 tree bounds;
1332
1333 if (!retval)
1334 return;
1335
1336 if (BOUNDED_P (ret_decl))
1337 {
1338 bounds = chkp_find_bounds (retval, gsi);
1339 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1340 gimple_return_set_retbnd (ret, bounds);
1341 }
1342
1343 update_stmt (ret);
1344 }
1345
1346 /* Force OP to be suitable for using as an argument for call.
1347 New statements (if any) go to SEQ. */
1348 static tree
1349 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1350 {
1351 gimple_seq stmts;
1352 gimple_stmt_iterator si;
1353
1354 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1355
1356 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1357 chkp_mark_stmt (gsi_stmt (si));
1358
1359 gimple_seq_add_seq (seq, stmts);
1360
1361 return op;
1362 }
1363
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* In a non-instrumented function a check against zero bounds
     is redundant; skip it.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG of zero marks a read access; honor the read-check flag.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG of one marks a write access; honor the write-check flag.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Make ADDR a valid gimple call argument; helper statements are
     collected into SEQ.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the lower-bound check call and mark it as checker code
     so it is not instrumented again.  */
  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1410
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* In a non-instrumented function a check against zero bounds
     is redundant; skip it.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG of zero marks a read access; honor the read-check flag.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG of one marks a write access; honor the write-check flag.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Make ADDR a valid gimple call argument; helper statements are
     collected into SEQ.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the upper-bound check call and mark it as checker code
     so it is not instrumented again.  */
  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1457
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* The first byte is checked against the lower bound and the last
     byte against the upper bound.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1471
1472 /* Replace call to _bnd_chk_* pointed by GSI with
1473 bndcu and bndcl calls. DIRFLAG determines whether
1474 check is for read or write. */
1475
1476 void
1477 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1478 tree dirflag)
1479 {
1480 gimple_stmt_iterator call_iter = *gsi;
1481 gimple call = gsi_stmt (*gsi);
1482 tree fndecl = gimple_call_fndecl (call);
1483 tree addr = gimple_call_arg (call, 0);
1484 tree bounds = chkp_find_bounds (addr, gsi);
1485
1486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1487 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1488 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1489
1490 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1491 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1492
1493 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1494 {
1495 tree size = gimple_call_arg (call, 1);
1496 addr = fold_build_pointer_plus (addr, size);
1497 addr = fold_build_pointer_plus_hwi (addr, -1);
1498 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1499 }
1500
1501 gsi_remove (&call_iter, true);
1502 }
1503
1504 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1505 corresponding bounds extract call. */
1506
1507 void
1508 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1509 {
1510 gimple call = gsi_stmt (*gsi);
1511 tree fndecl = gimple_call_fndecl (call);
1512 tree addr = gimple_call_arg (call, 0);
1513 tree bounds = chkp_find_bounds (addr, gsi);
1514 gimple extract;
1515
1516 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1517 fndecl = chkp_extract_lower_fndecl;
1518 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1519 fndecl = chkp_extract_upper_fndecl;
1520 else
1521 gcc_unreachable ();
1522
1523 extract = gimple_build_call (fndecl, 1, bounds);
1524 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1525 chkp_mark_stmt (extract);
1526
1527 gsi_replace (gsi, extract, false);
1528 }
1529
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      /* Fold the field's byte offset into the TMR offset.  */
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      gcc_assert (offs);

      /* Copy the TMR, retyping it to the field's type and shifting
	 its offset.  */
      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1556
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Element offset is index * element size, folded into the
	 existing TMR offset.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      /* Copy the TMR, retyping it to the element type and shifting
	 its offset.  */
      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
1590
1591 /* Helper function for chkp_add_bounds_to_call_stmt.
1592 Fill ALL_BOUNDS output array with created bounds.
1593
1594 OFFS is used for recursive calls and holds basic
1595 offset of TYPE in outer structure in bits.
1596
1597 ITER points a position where bounds are searched.
1598
1599 ALL_BOUNDS[i] is filled with elem bounds if there
1600 is a field in TYPE which has pointer type and offset
1601 equal to i * POINTER_SIZE in bits. */
1602 static void
1603 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1604 HOST_WIDE_INT offs,
1605 gimple_stmt_iterator *iter)
1606 {
1607 tree type = TREE_TYPE (elem);
1608
1609 if (BOUNDED_TYPE_P (type))
1610 {
1611 if (!all_bounds[offs / POINTER_SIZE])
1612 {
1613 tree temp = make_temp_ssa_name (type, NULL, "");
1614 gimple assign = gimple_build_assign (temp, elem);
1615 gimple_stmt_iterator gsi;
1616
1617 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1618 gsi = gsi_for_stmt (assign);
1619
1620 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1621 }
1622 }
1623 else if (RECORD_OR_UNION_TYPE_P (type))
1624 {
1625 tree field;
1626
1627 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1628 if (TREE_CODE (field) == FIELD_DECL)
1629 {
1630 tree base = unshare_expr (elem);
1631 tree field_ref = chkp_build_component_ref (base, field);
1632 HOST_WIDE_INT field_offs
1633 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1634 if (DECL_FIELD_OFFSET (field))
1635 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1636
1637 chkp_find_bounds_for_elem (field_ref, all_bounds,
1638 offs + field_offs, iter);
1639 }
1640 }
1641 else if (TREE_CODE (type) == ARRAY_TYPE)
1642 {
1643 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1644 tree etype = TREE_TYPE (type);
1645 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1646 unsigned HOST_WIDE_INT cur;
1647
1648 if (!maxval || integer_minus_onep (maxval))
1649 return;
1650
1651 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1652 {
1653 tree base = unshare_expr (elem);
1654 tree arr_elem = chkp_build_array_ref (base, etype,
1655 TYPE_SIZE (etype),
1656 cur);
1657 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1658 iter);
1659 }
1660 }
1661 }
1662
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer occupies exactly one bounds slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its bit offset within TYPE.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Arrays with no, non-constant, or empty domain contribute
	 no bound slots.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1709
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start from a clean bitmap and collect slots recursively.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1719
1720 /* Return 1 if call to FNDECL should be instrumented
1721 and 0 otherwise. */
1722
1723 static bool
1724 chkp_instrument_normal_builtin (tree fndecl)
1725 {
1726 switch (DECL_FUNCTION_CODE (fndecl))
1727 {
1728 case BUILT_IN_STRLEN:
1729 case BUILT_IN_STRCPY:
1730 case BUILT_IN_STRNCPY:
1731 case BUILT_IN_STPCPY:
1732 case BUILT_IN_STPNCPY:
1733 case BUILT_IN_STRCAT:
1734 case BUILT_IN_STRNCAT:
1735 case BUILT_IN_MEMCPY:
1736 case BUILT_IN_MEMPCPY:
1737 case BUILT_IN_MEMSET:
1738 case BUILT_IN_MEMMOVE:
1739 case BUILT_IN_BZERO:
1740 case BUILT_IN_STRCMP:
1741 case BUILT_IN_STRNCMP:
1742 case BUILT_IN_BCMP:
1743 case BUILT_IN_MEMCMP:
1744 case BUILT_IN_MEMCPY_CHK:
1745 case BUILT_IN_MEMPCPY_CHK:
1746 case BUILT_IN_MEMMOVE_CHK:
1747 case BUILT_IN_MEMSET_CHK:
1748 case BUILT_IN_STRCPY_CHK:
1749 case BUILT_IN_STRNCPY_CHK:
1750 case BUILT_IN_STPCPY_CHK:
1751 case BUILT_IN_STPNCPY_CHK:
1752 case BUILT_IN_STRCAT_CHK:
1753 case BUILT_IN_STRNCAT_CHK:
1754 case BUILT_IN_MALLOC:
1755 case BUILT_IN_CALLOC:
1756 case BUILT_IN_REALLOC:
1757 return 1;
1758
1759 default:
1760 return 0;
1761 }
1762 }
1763
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  When walking TYPE_ARG_TYPES the
	 void_type_node terminator marks a variadic tail, in which
	 case the actual argument type is used instead.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* Pointer (or by-reference) args get a single bounds arg right
	 after them; aggregates containing pointers get one bounds arg
	 per contained pointer.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds args were added, the original call can be kept.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* Replace old call statement with the new one, re-pointing SSA
     definitions at the new statement.  */
  if (call != new_call)
    {
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1981
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the pre-existing bounds symbol.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initial value encodes [LB, UB] in the target's bounds
     representation.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2030
/* Generate code to make bounds with specified lower bound LB and SIZE.
   If AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  /* Choose the insertion point: the caller's iterator or the start
     of the checker entry block.  */
  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Make LB and SIZE valid gimple call arguments.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk call producing the bounds value and mark it as
     checker code.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is only honored when an explicit iterator was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  return bounds;
}
2083
2084 /* Return var holding zero bounds. */
2085 tree
2086 chkp_get_zero_bounds_var (void)
2087 {
2088 if (!chkp_zero_bounds_var)
2089 chkp_zero_bounds_var
2090 = chkp_make_static_const_bounds (0, -1,
2091 CHKP_ZERO_BOUNDS_VAR_NAME);
2092 return chkp_zero_bounds_var;
2093 }
2094
2095 /* Return var holding none bounds. */
2096 tree
2097 chkp_get_none_bounds_var (void)
2098 {
2099 if (!chkp_none_bounds_var)
2100 chkp_none_bounds_var
2101 = chkp_make_static_const_bounds (-1, 0,
2102 CHKP_NONE_BOUNDS_VAR_NAME);
2103 return chkp_none_bounds_var;
2104 }
2105
/* Return SSA_NAME used to represent zero bounds.  */
static tree
chkp_get_zero_bounds (void)
{
  /* Zero bounds are created once and cached in ZERO_BOUNDS.  */
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* When static const bounds are in use, load the value from the
     static zero-bounds variable at the start of the entry block;
     otherwise emit a bndmk with zero lower bound and zero size.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2134
/* Return SSA_NAME used to represent none bounds.  The result is
   cached in NONE_BOUNDS and computed at most once per function.  */
static tree
chkp_get_none_bounds (void)
{
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Static const bounds are in use: load the value of the
	 static none bounds var at the function entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Lower bound -1 with size 2; presumably this yields the
       "always fail" (none) bounds — see the none bounds var which
       uses (-1, 0).  NOTE(review): confirm against bndmk semantics.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2164
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  Currently zero
   bounds are used for such results.  */
static tree
chkp_get_invalid_op_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2172
/* Return bounds to be used for loads of non-pointer values.
   Currently zero bounds are used for such loads.  */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  return chkp_get_zero_bounds ();
}
2179
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal functions never return bounds.  */
  if (gimple_call_internal_p (call))
    return false;

  /* Bounds narrowing builtins return a pointer with bounds.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Calls already marked as bounds-aware return bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Machine-dependent builtins are never instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* A non-instrumented normal builtin may still return bounds
	 when it is always_inline and has an instrumented clone
	 with a body.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* Assume any other call returns bounds for its pointer result.  */
  return true;
}
2217
/* Build bounds returned by CALL.  Depending on the callee the
   bounds are either computed directly (alloca, chkp builtins),
   taken from a passed argument (ERF_RETURNS_ARG), obtained via
   a generated __chkp_ret_bnd call, or default to zero bounds.
   The result is registered for the call's LHS and returned.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* alloca result gets bounds [lhs, lhs + size).  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG counts original (non-bounds) args; skip bounds
	     args to map it onto the instrumented arg list.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2324
/* Return the __chkp_ret_bnd call which loads bounds for the value
   of SSA name VAL produced by a call, or NULL if there is no such
   use of VAL.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  /* VAL is expected to be defined by a call statement.  */
  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a call to chkp_ret_bnd_fndecl.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2344
2345 /* Check the next parameter for the given PARM is bounds
2346 and return it's default SSA_NAME (create if required). */
2347 static tree
2348 chkp_get_next_bounds_parm (tree parm)
2349 {
2350 tree bounds = TREE_CHAIN (parm);
2351 gcc_assert (POINTER_BOUNDS_P (bounds));
2352 bounds = ssa_default_def (cfun, bounds);
2353 if (!bounds)
2354 {
2355 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2356 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2357 }
2358 return bounds;
2359 }
2360
/* Return bounds to be used for input argument PARM, which is the
   default-def SSA name of a PARM_DECL.  Bounds come from the
   registered cache, from the next (bounds) parameter, or default
   to zero bounds; the result is registered for PARM.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Pointer parms get bounds from the following bounds parm.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2424
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments ADDR and PTR.  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as bounds-aware so it is not instrumented again.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2438
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* Gimplify operands so they are valid call arguments.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2473
2474 /* Build and return CALL_EXPR for bndstx builtin with specified
2475 arguments. */
2476 tree
2477 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2478 {
2479 tree fn = build1 (ADDR_EXPR,
2480 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2481 chkp_bndstx_fndecl);
2482 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2483 fn, 3, ptr, bounds, addr);
2484 CALL_WITH_BOUNDS_P (call) = true;
2485 return call;
2486 }
2487
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* Gimplify operands so they are valid call arguments.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2518
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.
   Bounds are propagated, loaded from the bounds table, or
   selected between operands depending on the RHS code.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* BASE records which operand's bounds were reused; it is checked
     below for abnormal SSA names.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same condition.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Pick the bounds of whichever operand wins the min/max.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2741
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
   For PHI definitions, *ITER is set to point to the created bounds PHI.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on the underlying decl.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known non-zero size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* Abnormal PHI results need a named bounds var to avoid
	 coalescing problems; otherwise a shared temp is enough.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2854
2855 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2856 tree
2857 chkp_build_make_bounds_call (tree lower_bound, tree size)
2858 {
2859 tree call = build1 (ADDR_EXPR,
2860 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2861 chkp_bndmk_fndecl);
2862 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2863 call, 2, lower_bound, size);
2864 }
2865
2866 /* Create static bounds var of specfified OBJ which is
2867 is either VAR_DECL or string constant. */
2868 static tree
2869 chkp_make_static_bounds (tree obj)
2870 {
2871 static int string_id = 1;
2872 static int var_id = 1;
2873 tree *slot;
2874 const char *var_name;
2875 char *bnd_var_name;
2876 tree bnd_var;
2877
2878 /* First check if we already have required var. */
2879 if (chkp_static_var_bounds)
2880 {
2881 /* For vars we use assembler name as a key in
2882 chkp_static_var_bounds map. It allows to
2883 avoid duplicating bound vars for decls
2884 sharing assembler name. */
2885 if (TREE_CODE (obj) == VAR_DECL)
2886 {
2887 tree name = DECL_ASSEMBLER_NAME (obj);
2888 slot = chkp_static_var_bounds->get (name);
2889 if (slot)
2890 return *slot;
2891 }
2892 else
2893 {
2894 slot = chkp_static_var_bounds->get (obj);
2895 if (slot)
2896 return *slot;
2897 }
2898 }
2899
2900 /* Build decl for bounds var. */
2901 if (TREE_CODE (obj) == VAR_DECL)
2902 {
2903 if (DECL_IGNORED_P (obj))
2904 {
2905 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2906 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2907 }
2908 else
2909 {
2910 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2911
2912 /* For hidden symbols we want to skip first '*' char. */
2913 if (*var_name == '*')
2914 var_name++;
2915
2916 bnd_var_name = (char *) xmalloc (strlen (var_name)
2917 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2918 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2919 strcat (bnd_var_name, var_name);
2920 }
2921
2922 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2923 get_identifier (bnd_var_name),
2924 pointer_bounds_type_node);
2925
2926 /* Address of the obj will be used as lower bound. */
2927 TREE_ADDRESSABLE (obj) = 1;
2928 }
2929 else
2930 {
2931 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2932 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2933
2934 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2935 get_identifier (bnd_var_name),
2936 pointer_bounds_type_node);
2937 }
2938
2939 TREE_PUBLIC (bnd_var) = 0;
2940 TREE_USED (bnd_var) = 1;
2941 TREE_READONLY (bnd_var) = 0;
2942 TREE_STATIC (bnd_var) = 1;
2943 TREE_ADDRESSABLE (bnd_var) = 0;
2944 DECL_ARTIFICIAL (bnd_var) = 1;
2945 DECL_COMMON (bnd_var) = 1;
2946 DECL_COMDAT (bnd_var) = 1;
2947 DECL_READ_P (bnd_var) = 1;
2948 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2949 /* Force output similar to constant bounds.
2950 See chkp_make_static_const_bounds. */
2951 varpool_node::get_create (bnd_var)->force_output = 1;
2952 /* Mark symbol as requiring bounds initialization. */
2953 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2954 varpool_node::finalize_decl (bnd_var);
2955
2956 /* Add created var to the map to use it for other references
2957 to obj. */
2958 if (!chkp_static_var_bounds)
2959 chkp_static_var_bounds = new hash_map<tree, tree>;
2960
2961 if (TREE_CODE (obj) == VAR_DECL)
2962 {
2963 tree name = DECL_ASSEMBLER_NAME (obj);
2964 chkp_static_var_bounds->put (name, bnd_var);
2965 }
2966 else
2967 chkp_static_var_bounds->put (obj, bnd_var);
2968
2969 return bnd_var;
2970 }
2971
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.
   Generated code is placed in the function entry block.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Size is obtained via the __chkp_sizeof runtime builtin.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3032
3033 /* Return 1 if TYPE has fields with zero size or fields
3034 marked with chkp_variable_size attribute. */
3035 bool
3036 chkp_variable_size_type (tree type)
3037 {
3038 bool res = false;
3039 tree field;
3040
3041 if (RECORD_OR_UNION_TYPE_P (type))
3042 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3043 {
3044 if (TREE_CODE (field) == FIELD_DECL)
3045 res = res
3046 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3047 || chkp_variable_size_type (TREE_TYPE (field));
3048 }
3049 else
3050 res = !TYPE_SIZE (type)
3051 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3052 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3053
3054 return res;
3055 }
3056
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Static/extern vars: read bounds from a static bounds var
	 loaded in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Incomplete type: compute the size at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Known size: bounds are [&decl, &decl + size).  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
3122
/* Compute and return bounds for constant string CST.
   The result is registered for CST and reused.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Static const bounds in use: load bounds from a static
	 bounds var in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Otherwise make bounds [&cst, &cst + string length).  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
3160
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Intersection with zero bounds (or a missing operand) is a
     no-op: return the other operand.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3211
3212 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3213 and 0 othersize. */
3214 static bool
3215 chkp_may_narrow_to_field (tree field)
3216 {
3217 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3218 && tree_to_uhwi (DECL_SIZE (field)) != 0
3219 && (!DECL_FIELD_OFFSET (field)
3220 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3221 && (!DECL_FIELD_BIT_OFFSET (field)
3222 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3223 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3224 && !chkp_variable_size_type (TREE_TYPE (field));
3225 }
3226
3227 /* Return 1 if bounds for FIELD should be narrowed to
3228 field's own size. */
3229 static bool
3230 chkp_narrow_bounds_for_field (tree field)
3231 {
3232 HOST_WIDE_INT offs;
3233 HOST_WIDE_INT bit_offs;
3234
3235 if (!chkp_may_narrow_to_field (field))
3236 return false;
3237
3238 /* Accesse to compiler generated fields should not cause
3239 bounds narrowing. */
3240 if (DECL_ARTIFICIAL (field))
3241 return false;
3242
3243 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3244 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3245
3246 return (flag_chkp_narrow_bounds
3247 && (flag_chkp_first_field_has_own_bounds
3248 || offs
3249 || bit_offs));
3250 }
3251
3252 /* Perform narrowing for BOUNDS using bounds computed for field
3253 access COMPONENT. ITER meaning is the same as for
3254 chkp_intersect_bounds. */
3255 static tree
3256 chkp_narrow_bounds_to_field (tree bounds, tree component,
3257 gimple_stmt_iterator *iter)
3258 {
3259 tree field = TREE_OPERAND (component, 1);
3260 tree size = DECL_SIZE_UNIT (field);
3261 tree field_ptr = chkp_build_addr_expr (component);
3262 tree field_bounds;
3263
3264 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3265
3266 return chkp_intersect_bounds (field_bounds, bounds, iter);
3267 }
3268
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all nodes to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* No indirection: the base must be a declaration (or string
	 constant) whose address can be taken directly.  */
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
	 bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array accesses are never considered safe.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* When narrowing to the innermost array is requested,
		 narrow through every array-typed field on the way and
		 drop any previously chosen narrowing candidate.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3410
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Reuse bounds previously registered for this object's address.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Let the access parser compute (possibly narrowed) bounds
	   for the accessed component.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are not range checked; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Address of a dereference: use bounds of the dereferenced
	 pointer.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  /* Cache the result for subsequent queries.  */
  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3478
3479 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3480 to compute bounds if required. Computed bounds should be available at
3481 position pointed by ITER.
3482
3483 If PTR_SRC is NULL_TREE then pointer definition is identified.
3484
3485 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3486 PTR. If PTR is a any memory reference then ITER points to a statement
3487 after which bndldx will be inserterd. In both cases ITER will be updated
3488 to point to the inserted bndldx statement. */
3489
3490 static tree
3491 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3492 {
3493 tree addr = NULL_TREE;
3494 tree bounds = NULL_TREE;
3495
3496 if (!ptr_src)
3497 ptr_src = ptr;
3498
3499 bounds = chkp_get_registered_bounds (ptr_src);
3500
3501 if (bounds)
3502 return bounds;
3503
3504 switch (TREE_CODE (ptr_src))
3505 {
3506 case MEM_REF:
3507 case VAR_DECL:
3508 if (BOUNDED_P (ptr_src))
3509 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3510 bounds = chkp_get_zero_bounds ();
3511 else
3512 {
3513 addr = chkp_build_addr_expr (ptr_src);
3514 bounds = chkp_build_bndldx (addr, ptr, iter);
3515 }
3516 else
3517 bounds = chkp_get_nonpointer_load_bounds ();
3518 break;
3519
3520 case ARRAY_REF:
3521 case COMPONENT_REF:
3522 addr = get_base_address (ptr_src);
3523 if (DECL_P (addr)
3524 || TREE_CODE (addr) == MEM_REF
3525 || TREE_CODE (addr) == TARGET_MEM_REF)
3526 {
3527 if (BOUNDED_P (ptr_src))
3528 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3529 bounds = chkp_get_zero_bounds ();
3530 else
3531 {
3532 addr = chkp_build_addr_expr (ptr_src);
3533 bounds = chkp_build_bndldx (addr, ptr, iter);
3534 }
3535 else
3536 bounds = chkp_get_nonpointer_load_bounds ();
3537 }
3538 else
3539 {
3540 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3541 bounds = chkp_find_bounds (addr, iter);
3542 }
3543 break;
3544
3545 case PARM_DECL:
3546 gcc_unreachable ();
3547 bounds = chkp_get_bound_for_parm (ptr_src);
3548 break;
3549
3550 case TARGET_MEM_REF:
3551 addr = chkp_build_addr_expr (ptr_src);
3552 bounds = chkp_build_bndldx (addr, ptr, iter);
3553 break;
3554
3555 case SSA_NAME:
3556 bounds = chkp_get_registered_bounds (ptr_src);
3557 if (!bounds)
3558 {
3559 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3560 gphi_iterator phi_iter;
3561
3562 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3563
3564 gcc_assert (bounds);
3565
3566 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3567 {
3568 unsigned i;
3569
3570 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3571 {
3572 tree arg = gimple_phi_arg_def (def_phi, i);
3573 tree arg_bnd;
3574 gphi *phi_bnd;
3575
3576 arg_bnd = chkp_find_bounds (arg, NULL);
3577
3578 /* chkp_get_bounds_by_definition created new phi
3579 statement and phi_iter points to it.
3580
3581 Previous call to chkp_find_bounds could create
3582 new basic block and therefore change phi statement
3583 phi_iter points to. */
3584 phi_bnd = phi_iter.phi ();
3585
3586 add_phi_arg (phi_bnd, arg_bnd,
3587 gimple_phi_arg_edge (def_phi, i),
3588 UNKNOWN_LOCATION);
3589 }
3590
3591 /* If all bound phi nodes have their arg computed
3592 then we may finish its computation. See
3593 chkp_finish_incomplete_bounds for more details. */
3594 if (chkp_may_finish_incomplete_bounds ())
3595 chkp_finish_incomplete_bounds ();
3596 }
3597
3598 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3599 || chkp_incomplete_bounds (bounds));
3600 }
3601 break;
3602
3603 case ADDR_EXPR:
3604 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3605 break;
3606
3607 case INTEGER_CST:
3608 if (integer_zerop (ptr_src))
3609 bounds = chkp_get_none_bounds ();
3610 else
3611 bounds = chkp_get_invalid_op_bounds ();
3612 break;
3613
3614 default:
3615 if (dump_file && (dump_flags & TDF_DETAILS))
3616 {
3617 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3618 get_tree_code_name (TREE_CODE (ptr_src)));
3619 print_node (dump_file, "", ptr_src, 0);
3620 }
3621 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3622 get_tree_code_name (TREE_CODE (ptr_src)));
3623 }
3624
3625 if (!bounds)
3626 {
3627 if (dump_file && (dump_flags & TDF_DETAILS))
3628 {
3629 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3630 print_node (dump_file, "", ptr_src, 0);
3631 }
3632 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3633 }
3634
3635 return bounds;
3636 }
3637
/* Normal case for bounds search without forced narrowing.
   Returns bounds of PTR; statements needed to compute them
   are inserted at position ITER.  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3644
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3652
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* A single pointer: hand it to the handler directly.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into every initialized field that contains
	     pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Field-by-field copy: walk pointer-holding fields of both
	   sides in parallel.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* A RANGE_EXPR index initializes all elements in
		     [lo_index, hi_index] with the same value.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit index restarts the running element
		     counter; otherwise elements are consecutive.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3751
3752 /* Add code to copy bounds for assignment of RHS to LHS.
3753 ARG is an iterator pointing ne code position. */
3754 static void
3755 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3756 {
3757 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3758 tree bounds = chkp_find_bounds (rhs, iter);
3759 tree addr = chkp_build_addr_expr(lhs);
3760
3761 chkp_build_bndstx (addr, rhs, bounds, iter);
3762 }
3763
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush accumulated statements into a constructor once the
	   per-constructor statement budget is exhausted.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit the remaining pointer-bounds initializations, if any.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds variable is initialized with the address of
	   the object it describes.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* The pass is done with these maps; release them.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3831
3832 /* An instrumentation function which is called for each statement
3833 having memory access we want to instrument. It inserts check
3834 code and bounds copy code.
3835
3836 ITER points to statement to instrument.
3837
3838 NODE holds memory access in statement to check.
3839
3840 LOC holds the location information for statement.
3841
3842 DIRFLAGS determines whether access is read or write.
3843
3844 ACCESS_OFFS should be added to address used in NODE
3845 before check.
3846
3847 ACCESS_SIZE holds size of checked access.
3848
3849 SAFE indicates if NODE access is safe and should not be
3850 checked. */
3851 static void
3852 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3853 location_t loc, tree dirflag,
3854 tree access_offs, tree access_size,
3855 bool safe)
3856 {
3857 tree node_type = TREE_TYPE (node);
3858 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3859 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3860 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3861 tree ptr = NULL_TREE; /* a pointer used for dereference */
3862 tree bounds = NULL_TREE;
3863
3864 /* We do not need instrumentation for clobbers. */
3865 if (dirflag == integer_one_node
3866 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3867 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3868 return;
3869
3870 switch (TREE_CODE (node))
3871 {
3872 case ARRAY_REF:
3873 case COMPONENT_REF:
3874 {
3875 bool bitfield;
3876 tree elt;
3877
3878 if (safe)
3879 {
3880 /* We are not going to generate any checks, so do not
3881 generate bounds as well. */
3882 addr_first = chkp_build_addr_expr (node);
3883 break;
3884 }
3885
3886 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3887 &bitfield, &bounds, iter, false);
3888
3889 /* Break if there is no dereference and operation is safe. */
3890
3891 if (bitfield)
3892 {
3893 tree field = TREE_OPERAND (node, 1);
3894
3895 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3896 size = DECL_SIZE_UNIT (field);
3897
3898 if (elt)
3899 elt = chkp_build_addr_expr (elt);
3900 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3901 addr_first = fold_build_pointer_plus_loc (loc,
3902 addr_first,
3903 byte_position (field));
3904 }
3905 else
3906 addr_first = chkp_build_addr_expr (node);
3907 }
3908 break;
3909
3910 case INDIRECT_REF:
3911 ptr = TREE_OPERAND (node, 0);
3912 addr_first = ptr;
3913 break;
3914
3915 case MEM_REF:
3916 ptr = TREE_OPERAND (node, 0);
3917 addr_first = chkp_build_addr_expr (node);
3918 break;
3919
3920 case TARGET_MEM_REF:
3921 ptr = TMR_BASE (node);
3922 addr_first = chkp_build_addr_expr (node);
3923 break;
3924
3925 case ARRAY_RANGE_REF:
3926 printf("ARRAY_RANGE_REF\n");
3927 debug_gimple_stmt(gsi_stmt(*iter));
3928 debug_tree(node);
3929 gcc_unreachable ();
3930 break;
3931
3932 case BIT_FIELD_REF:
3933 {
3934 tree offs, rem, bpu;
3935
3936 gcc_assert (!access_offs);
3937 gcc_assert (!access_size);
3938
3939 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3940 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3941 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3942 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3943
3944 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3945 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3946 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3947 size = fold_convert (size_type_node, size);
3948
3949 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3950 dirflag, offs, size, safe);
3951 return;
3952 }
3953 break;
3954
3955 case VAR_DECL:
3956 case RESULT_DECL:
3957 case PARM_DECL:
3958 if (dirflag != integer_one_node
3959 || DECL_REGISTER (node))
3960 return;
3961
3962 safe = true;
3963 addr_first = chkp_build_addr_expr (node);
3964 break;
3965
3966 default:
3967 return;
3968 }
3969
3970 /* If addr_last was not computed then use (addr_first + size - 1)
3971 expression to compute it. */
3972 if (!addr_last)
3973 {
3974 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3975 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3976 }
3977
3978 /* Shift both first_addr and last_addr by access_offs if specified. */
3979 if (access_offs)
3980 {
3981 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3982 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3983 }
3984
3985 /* Generate bndcl/bndcu checks if memory access is not safe. */
3986 if (!safe)
3987 {
3988 gimple_stmt_iterator stmt_iter = *iter;
3989
3990 if (!bounds)
3991 bounds = chkp_find_bounds (ptr, iter);
3992
3993 chkp_check_mem_access (addr_first, addr_last, bounds,
3994 stmt_iter, loc, dirflag);
3995 }
3996
3997 /* We need to store bounds in case pointer is stored. */
3998 if (dirflag == integer_one_node
3999 && chkp_type_has_pointer (node_type)
4000 && flag_chkp_store_bounds)
4001 {
4002 gimple stmt = gsi_stmt (*iter);
4003 tree rhs1 = gimple_assign_rhs1 (stmt);
4004 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4005
4006 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4007 chkp_walk_pointer_assignments (node, rhs1, iter,
4008 chkp_copy_bounds_for_elem);
4009 else
4010 {
4011 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4012 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4013 }
4014 }
4015 }
4016
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.
     Walk backwards from ASSIGN's position over the statements that the
     walk above inserted.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* The only calls expected here are the chkp builtins.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4055
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements trailing a block-ending statement must be moved
	   onto the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4113
4114 /* Walker callback for chkp_replace_function_pointers. Replaces
4115 function pointer in the specified operand with pointer to the
4116 instrumented function version. */
4117 static tree
4118 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4119 void *data ATTRIBUTE_UNUSED)
4120 {
4121 if (TREE_CODE (*op) == FUNCTION_DECL
4122 && chkp_instrumentable_p (*op)
4123 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4124 /* For builtins we replace pointers only for selected
4125 function and functions having definitions. */
4126 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4127 && (chkp_instrument_normal_builtin (*op)
4128 || gimple_has_body_p (*op)))))
4129 {
4130 struct cgraph_node *node = cgraph_node::get_create (*op);
4131 struct cgraph_node *clone = NULL;
4132
4133 if (!node->instrumentation_clone)
4134 clone = chkp_maybe_create_clone (*op);
4135
4136 if (clone)
4137 *op = clone->decl;
4138 *walk_subtrees = 0;
4139 }
4140
4141 return NULL;
4142 }
4143
4144 /* This function searches for function pointers in statement
4145 pointed by GSI and replaces them with pointers to instrumented
4146 function versions. */
4147 static void
4148 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4149 {
4150 gimple stmt = gsi_stmt (*gsi);
4151 /* For calls we want to walk call args only. */
4152 if (gimple_code (stmt) == GIMPLE_CALL)
4153 {
4154 unsigned i;
4155 for (i = 0; i < gimple_call_num_args (stmt); i++)
4156 walk_tree (gimple_call_arg_ptr (stmt, i),
4157 chkp_replace_function_pointer, NULL, NULL);
4158 }
4159 else
4160 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4161 }
4162
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* In a checker constructor all accesses are treated as safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor: instrumentation may split blocks.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* Instrument the store (lhs) and the loads (rhs ops).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      /* Pointer parameter: store its incoming bounds at its
		 address in the entry block.  */
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* Aggregate parameter: store bounds for every contained
		 pointer slot.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4298
/* Find init/null/copy/set_ptr_bounds calls and replace them
   with assignments.  These builtins just return their first
   argument, so removing them allows better code optimization.  */
4302
4303 static void
4304 chkp_remove_useless_builtins ()
4305 {
4306 basic_block bb;
4307 gimple_stmt_iterator gsi;
4308
4309 FOR_EACH_BB_FN (bb, cfun)
4310 {
4311 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4312 {
4313 gimple stmt = gsi_stmt (gsi);
4314 tree fndecl;
4315 enum built_in_function fcode;
4316
4317 /* Find builtins returning first arg and replace
4318 them with assignments. */
4319 if (gimple_code (stmt) == GIMPLE_CALL
4320 && (fndecl = gimple_call_fndecl (stmt))
4321 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4322 && (fcode = DECL_FUNCTION_CODE (fndecl))
4323 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4324 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4325 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4326 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4327 {
4328 tree res = gimple_call_arg (stmt, 0);
4329 update_call_from_tree (&gsi, res);
4330 stmt = gsi_stmt (gsi);
4331 update_stmt (stmt);
4332 }
4333 }
4334 }
4335 }
4336
/* Initialize pass: reset per-function state, (re)allocate the sets and
   maps used during instrumentation and compute dominance info.
   Paired with chkp_fini, which releases the per-function parts.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks possibly left on statements from a
     previous invocation of the pass.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* Per-function containers; these are deleted in chkp_fini, so no
     stale object exists here and no delete is needed first.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  /* These maps are not freed by chkp_fini; delete any instance left
     from a previous function before allocating a fresh one
     (delete of a null pointer is a no-op on the first run).  */
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  /* Per-function maps released in chkp_fini.  */
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset cached per-function trees/blocks; they are recreated lazily.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  /* Dominance info is used during instrumentation and freed in
     chkp_fini.  */
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4382
/* Finalize instrumentation pass: release the per-function state
   allocated by chkp_init.  Note that chkp_reg_bounds, chkp_bound_vars
   and chkp_bounds_map are intentionally kept alive here; chkp_init
   deletes them before reallocating on the next function.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached trees/blocks so nothing dangles into the next function.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4403
/* Main instrumentation pass function.  Runs the phases in order:
   set up state, instrument the function body, simplify useless chkp
   builtins, mark the function as instrumented, repair the CFG and
   tear the state down.  Always returns 0 (no extra TODOs).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4422
4423 /* Instrumentation pass gate. */
4424 static bool
4425 chkp_gate (void)
4426 {
4427 cgraph_node *node = cgraph_node::get (cfun->decl);
4428 return ((node != NULL
4429 && node->instrumentation_clone)
4430 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4431 }
4432
4433 namespace {
4434
/* Pass descriptor for the Pointer Bounds Checker instrumentation pass.
   Requires SSA form and a CFG; requests IL verification and SSA update
   after the pass runs.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};
4448
/* Pass-manager wrapper wiring chkp_gate/chkp_execute into a gimple
   optimization pass.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  /* Allow the pass manager to create additional instances of the pass.  */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp
4473
4474 } // anon namespace
4475
/* Create an instance of the chkp pass for context CTXT.
   Caller (the pass manager) takes ownership of the returned pass.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4481
4482 #include "gt-tree-chkp.h"