improve construction of the original schedule
[gcc.git] / gcc / tree-chkp.c
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
55
56 /* Pointer Bounds Checker instruments code with memory checks to find
57 out-of-bounds memory accesses. Checks are performed by computing
58 bounds for each pointer and then comparing address of accessed
59 memory before pointer dereferencing.
60
61 1. Function clones.
62
63 See ipa-chkp.c.
64
65 2. Instrumentation.
66
67 There are a few things to instrument:
68
69 a) Memory accesses - add checker calls to check address of accessed memory
70 against bounds of dereferenced pointer. Obviously safe memory
71 accesses like static variable access does not have to be instrumented
72 with checks.
73
74 Example:
75
76 val_2 = *p_1;
77
78 with 4 bytes access is transformed into:
79
80 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81 D.1_4 = p_1 + 3;
82 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83 val_2 = *p_1;
84
85 where __bound_tmp.1_3 are bounds computed for pointer p_1,
86 __builtin___chkp_bndcl is a lower bound check and
87 __builtin___chkp_bndcu is an upper bound check.
88
89 b) Pointer stores.
90
91 When pointer is stored in memory we need to store its bounds. To
92 achieve compatibility of instrumented code with regular codes
93 we have to keep data layout and store bounds in special bound tables
94 via special checker call. Implementation of bounds table may vary for
95 different platforms. It has to associate pointer value and its
96 location (it is required because we may have two equal pointers
97 with different bounds stored in different places) with bounds.
98 Another checker builtin allows to get bounds for specified pointer
99 loaded from specified location.
100
101 Example:
102
103 buf1[i_1] = &buf2;
104
105 is transformed into:
106
107 buf1[i_1] = &buf2;
108 D.1_2 = &buf1[i_1];
109 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
110
111 where __bound_tmp.1_2 are bounds of &buf2.
112
113 c) Static initialization.
114
115 The special case of pointer store is static pointer initialization.
116 Bounds initialization is performed in a few steps:
117 - register all static initializations in front-end using
118 chkp_register_var_initializer
119 - when file compilation finishes we create functions with special
120 attribute 'chkp ctor' and put explicit initialization code
121 (assignments) for all statically initialized pointers.
122 - when checker constructor is compiled checker pass adds required
123 bounds initialization for all statically initialized pointers
124 - since we do not actually need excess pointers initialization
125 in checker constructor we remove such assignments from them
126
127 d) Calls.
128
129 For each call in the code we add additional arguments to pass
130 bounds for pointer arguments. We determine type of call arguments
131 using arguments list from function declaration; if function
132 declaration is not available we use function type; otherwise
133 (e.g. for unnamed arguments) we use type of passed value. Function
134 declaration/type is replaced with the instrumented one.
135
136 Example:
137
138 val_1 = foo (&buf1, &buf2, &buf1, 0);
139
140 is translated into:
141
142 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143 &buf1, __bound_tmp.1_2, 0);
144
145 e) Returns.
146
147 If function returns a pointer value we have to return bounds also.
148 A new operand was added for return statement to hold returned bounds.
149
150 Example:
151
152 return &_buf1;
153
154 is transformed into
155
156 return &_buf1, __bound_tmp.1_1;
157
158 3. Bounds computation.
159
160 Compiler is fully responsible for computing bounds to be used for each
161 memory access. The first step for bounds computation is to find the
162 origin of pointer dereferenced for memory access. Basing on pointer
163 origin we define a way to compute its bounds. There are just few
164 possible cases:
165
166 a) Pointer is returned by call.
167
168 In this case we use corresponding checker builtin method to obtain returned
169 bounds.
170
171 Example:
172
173 buf_1 = malloc (size_2);
174 foo (buf_1);
175
176 is translated into:
177
178 buf_1 = malloc (size_2);
179 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180 foo (buf_1, __bound_tmp.1_3);
181
182 b) Pointer is an address of an object.
183
184 In this case compiler tries to compute objects size and create corresponding
185 bounds. If object has incomplete type then special checker builtin is used to
186 obtain its size at runtime.
187
188 Example:
189
190 foo ()
191 {
192 <unnamed type> __bound_tmp.3;
193 static int buf[100];
194
195 <bb 3>:
196 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
197
198 <bb 2>:
199 return &buf, __bound_tmp.3_2;
200 }
201
202 Example:
203
204 Address of an object 'extern int buf[]' with incomplete type is
205 returned.
206
207 foo ()
208 {
209 <unnamed type> __bound_tmp.4;
210 long unsigned int __size_tmp.3;
211
212 <bb 3>:
213 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
215
216 <bb 2>:
217 return &buf, __bound_tmp.4_3;
218 }
219
220 c) Pointer is the result of object narrowing.
221
222 It happens when we use pointer to an object to compute pointer to a part
223 of an object. E.g. we take pointer to a field of a structure. In this
224 case we perform bounds intersection using bounds of original object and
225 bounds of object's part (which are computed basing on its type).
226
227 There may be some debatable questions about when narrowing should occur
228 and when it should not. To avoid false bound violations in correct
229 programs we do not perform narrowing when address of an array element is
230 obtained (it has address of the whole array) and when address of the first
231 structure field is obtained (because it is guaranteed to be equal to
232 address of the whole structure and it is legal to cast it back to structure).
233
234 Default narrowing behavior may be changed using compiler flags.
235
236 Example:
237
238 In this example address of the second structure field is returned.
239
240 foo (struct A * p, __bounds_type __bounds_of_p)
241 {
242 <unnamed type> __bound_tmp.3;
243 int * _2;
244 int * _5;
245
246 <bb 2>:
247 _5 = &p_1(D)->second_field;
248 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 __bounds_of_p_3(D));
251 _2 = &p_1(D)->second_field;
252 return _2, __bound_tmp.3_8;
253 }
254
255 Example:
256
257 In this example address of the first field of array element is returned.
258
259 foo (struct A * p, __bounds_type __bounds_of_p, int i)
260 {
261 long unsigned int _3;
262 long unsigned int _4;
263 struct A * _6;
264 int * _7;
265
266 <bb 2>:
267 _3 = (long unsigned int) i_1(D);
268 _4 = _3 * 8;
269 _6 = p_5(D) + _4;
270 _7 = &_6->first_field;
271 return _7, __bounds_of_p_2(D);
272 }
273
274
275 d) Pointer is the result of pointer arithmetic or type cast.
276
277 In this case bounds of the base pointer are used. In case of binary
278 operation producing a pointer we are analyzing data flow further
279 looking for operand's bounds. One operand is considered as a base
280 if it has some valid bounds. If we fall into a case when none of
281 operands (or both of them) has valid bounds, a default bounds value
282 is used.
283
284 Trying to find out bounds for binary operations we may fall into
285 cyclic dependencies for pointers. To avoid infinite recursion all
286 walked phi nodes instantly obtain corresponding bounds but created
287 bounds are marked as incomplete. It helps us to stop DF walk during
288 bounds search.
289
290 When we reach pointer source, some args of incomplete bounds phi obtain
291 valid bounds and those values are propagated further through phi nodes.
292 If no valid bounds were found for phi node then we mark its result as
293 invalid bounds. Process stops when all incomplete bounds become either
294 valid or invalid and we are able to choose a pointer base.
295
296 e) Pointer is loaded from the memory.
297
298 In this case we just need to load bounds from the bounds table.
299
300 Example:
301
302 foo ()
303 {
304 <unnamed type> __bound_tmp.3;
305 static int * buf;
306 int * _2;
307
308 <bb 2>:
309 _2 = buf;
310 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 return _2, __bound_tmp.3_4;
312 }
313
314 */
315
316 typedef void (*assign_handler)(tree, tree, void *);
317
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 tree *elt, bool *safe,
324 bool *bitfield,
325 tree *bounds,
326 gimple_stmt_iterator *iter,
327 bool innermost_bounds);
328
329 #define chkp_bndldx_fndecl \
330 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
331 #define chkp_bndstx_fndecl \
332 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
333 #define chkp_checkl_fndecl \
334 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
335 #define chkp_checku_fndecl \
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
337 #define chkp_bndmk_fndecl \
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
339 #define chkp_ret_bnd_fndecl \
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
341 #define chkp_intersect_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
343 #define chkp_narrow_bounds_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
345 #define chkp_sizeof_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
347 #define chkp_extract_lower_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
349 #define chkp_extract_upper_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
351
352 static GTY (()) tree chkp_uintptr_type;
353
354 static GTY (()) tree chkp_zero_bounds_var;
355 static GTY (()) tree chkp_none_bounds_var;
356
357 static GTY (()) basic_block entry_block;
358 static GTY (()) tree zero_bounds;
359 static GTY (()) tree none_bounds;
360 static GTY (()) tree incomplete_bounds;
361 static GTY (()) tree tmp_var;
362 static GTY (()) tree size_tmp_var;
363 static GTY (()) bitmap chkp_abnormal_copies;
364
365 struct hash_set<tree> *chkp_invalid_bounds;
366 struct hash_set<tree> *chkp_completed_bounds_set;
367 struct hash_map<tree, tree> *chkp_reg_bounds;
368 struct hash_map<tree, tree> *chkp_bound_vars;
369 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
370 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
371 struct hash_map<tree, tree> *chkp_bounds_map;
372 struct hash_map<tree, tree> *chkp_static_var_bounds;
373
374 static bool in_chkp_pass;
375
376 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
377 #define CHKP_SIZE_TMP_NAME "__size_tmp"
378 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
379 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
380 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
381 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
382 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
383
384 /* Static checker constructors may become very large and their
385 compilation with optimization may take too much time.
386 Therefore we put a limit to number of statements in one
387 constructor. Tests with 100 000 statically initialized
388 pointers showed following compilation times on Sandy Bridge
389 server (used -O2):
390 limit 100 => ~18 sec.
391 limit 300 => ~22 sec.
392 limit 1000 => ~30 sec.
393 limit 3000 => ~49 sec.
394 limit 5000 => ~55 sec.
395 limit 10000 => ~76 sec.
396 limit 100000 => ~532 sec. */
397 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
398
399 struct chkp_ctor_stmt_list
400 {
401 tree stmts; /* Statement list accumulated for the current static
		   checker constructor.  */
402 int avail; /* Number of statements that may still be added before
		  the constructor must be emitted (budget starts at
		  MAX_STMTS_IN_STATIC_CHKP_CTOR).  */
403 };
404
405 /* Return 1 if function FNDECL is instrumented by Pointer
406 Bounds Checker. */
407 bool
408 chkp_function_instrumented_p (tree fndecl)
409 {
410 return fndecl
411 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
412 }
413
414 /* Mark function FNDECL as instrumented. */
415 void
416 chkp_function_mark_instrumented (tree fndecl)
417 {
418 if (chkp_function_instrumented_p (fndecl))
419 return;
420
421 DECL_ATTRIBUTES (fndecl)
422 = tree_cons (get_identifier ("chkp instrumented"), NULL,
423 DECL_ATTRIBUTES (fndecl));
424 }
425
426 /* Return true when STMT is builtin call to instrumentation function
427 corresponding to CODE. */
428
429 bool
430 chkp_gimple_call_builtin_p (gimple *call,
431 enum built_in_function code)
432 {
433 tree fndecl;
434 if (is_gimple_call (call)
435 && (fndecl = targetm.builtin_chkp_function (code))
436 && gimple_call_fndecl (call) == fndecl)
437 return true;
438 return false;
439 }
440
441 /* Emit code to build zero bounds and return RTL holding
442 the result. */
443 rtx
444 chkp_expand_zero_bounds ()
445 {
446 tree zero_bnd;
447
448 if (flag_chkp_use_static_const_bounds)
449 zero_bnd = chkp_get_zero_bounds_var ();
450 else
451 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
452 integer_zero_node);
453 return expand_normal (zero_bnd);
454 }
455
456 /* Emit code to store zero bounds for PTR located at MEM. */
457 void
458 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
459 {
460 tree zero_bnd, bnd, addr, bndstx;
461
462 if (flag_chkp_use_static_const_bounds)
463 zero_bnd = chkp_get_zero_bounds_var ();
464 else
465 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
466 integer_zero_node);
467 bnd = make_tree (pointer_bounds_type_node,
468 assign_temp (pointer_bounds_type_node, 0, 1));
469 addr = build1 (ADDR_EXPR,
470 build_pointer_type (TREE_TYPE (mem)), mem);
471 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
472
473 expand_assignment (bnd, zero_bnd, false);
474 expand_normal (bndstx);
475 }
476
477 /* Build retbnd call for returned value RETVAL.
478
479 If BNDVAL is not NULL then result is stored
480 in it. Otherwise a temporary is created to
481 hold returned value.
482
483 GSI points to a position for a retbnd call
484 and is set to created stmt.
485
486 Cgraph edge is created for a new call if
487 UPDATE_EDGE is 1.
488
489 Obtained bounds are returned. */
490 tree
491 chkp_insert_retbnd_call (tree bndval, tree retval,
492 gimple_stmt_iterator *gsi)
493 {
494 gimple *call;
495
496 if (!bndval)
497 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
498
499 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
500 gimple_call_set_lhs (call, bndval);
501 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
502
503 return bndval;
504 }
505
506 /* Build a GIMPLE_CALL identical to CALL but skipping bounds
507 arguments. */
508
509 gcall *
510 chkp_copy_call_skip_bounds (gcall *call)
511 {
512 bitmap bounds;
513 unsigned i;
514
515 bitmap_obstack_initialize (NULL);
516 bounds = BITMAP_ALLOC (NULL);
517
518 for (i = 0; i < gimple_call_num_args (call); i++)
519 if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
520 bitmap_set_bit (bounds, i);
521
522 if (!bitmap_empty_p (bounds))
523 call = gimple_call_copy_skip_args (call, bounds);
524 gimple_call_set_with_bounds (call, false);
525
526 BITMAP_FREE (bounds);
527 bitmap_obstack_release (NULL);
528
529 return call;
530 }
531
532 /* Redirect edge E to the correct node according to call_stmt.
533 Return 1 if bounds removal from call_stmt should be done
534 instead of redirection. */
535
536 bool
537 chkp_redirect_edge (cgraph_edge *e)
538 {
539 bool instrumented = false;
540 tree decl = e->callee->decl;
541
542 if (e->callee->instrumentation_clone
543 || chkp_function_instrumented_p (decl))
544 instrumented = true;
545
546 if (instrumented
547 && !gimple_call_with_bounds_p (e->call_stmt))
548 e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
549 else if (!instrumented
550 && gimple_call_with_bounds_p (e->call_stmt)
551 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
552 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
553 && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
554 {
555 if (e->callee->instrumented_version)
556 e->redirect_callee (e->callee->instrumented_version);
557 else
558 {
559 tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
560 /* Avoid bounds removal if all args will be removed. */
561 if (!args || TREE_VALUE (args) != void_type_node)
562 return true;
563 else
564 gimple_call_set_with_bounds (e->call_stmt, false);
565 }
566 }
567
568 return false;
569 }
570
571 /* Mark statement S to not be instrumented. */
572 static void
573 chkp_mark_stmt (gimple *s)
574 {
575 gimple_set_plf (s, GF_PLF_1, true);
576 }
577
578 /* Mark statement S to be instrumented. */
579 static void
580 chkp_unmark_stmt (gimple *s)
581 {
582 gimple_set_plf (s, GF_PLF_1, false);
583 }
584
585 /* Return 1 if statement S should not be instrumented. */
586 static bool
587 chkp_marked_stmt_p (gimple *s)
588 {
589 return gimple_plf (s, GF_PLF_1);
590 }
591
592 /* Get var to be used for bound temps. */
593 static tree
594 chkp_get_tmp_var (void)
595 {
596 if (!tmp_var)
597 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
598
599 return tmp_var;
600 }
601
602 /* Get SSA_NAME to be used as temp. */
603 static tree
604 chkp_get_tmp_reg (gimple *stmt)
605 {
606 if (in_chkp_pass)
607 return make_ssa_name (chkp_get_tmp_var (), stmt);
608
609 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
610 CHKP_BOUND_TMP_NAME);
611 }
612
613 /* Get var to be used for size temps. */
614 static tree
615 chkp_get_size_tmp_var (void)
616 {
617 if (!size_tmp_var)
618 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
619
620 return size_tmp_var;
621 }
622
623 /* Register bounds BND for address of OBJ. */
624 static void
625 chkp_register_addr_bounds (tree obj, tree bnd)
626 {
627 if (bnd == incomplete_bounds)
628 return;
629
630 chkp_reg_addr_bounds->put (obj, bnd);
631
632 if (dump_file && (dump_flags & TDF_DETAILS))
633 {
634 fprintf (dump_file, "Regsitered bound ");
635 print_generic_expr (dump_file, bnd, 0);
636 fprintf (dump_file, " for address of ");
637 print_generic_expr (dump_file, obj, 0);
638 fprintf (dump_file, "\n");
639 }
640 }
641
642 /* Return bounds registered for address of OBJ. */
643 static tree
644 chkp_get_registered_addr_bounds (tree obj)
645 {
646 tree *slot = chkp_reg_addr_bounds->get (obj);
647 return slot ? *slot : NULL_TREE;
648 }
649
650 /* Mark BOUNDS as completed. */
651 static void
652 chkp_mark_completed_bounds (tree bounds)
653 {
654 chkp_completed_bounds_set->add (bounds);
655
656 if (dump_file && (dump_flags & TDF_DETAILS))
657 {
658 fprintf (dump_file, "Marked bounds ");
659 print_generic_expr (dump_file, bounds, 0);
660 fprintf (dump_file, " as completed\n");
661 }
662 }
663
664 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
665 static bool
666 chkp_completed_bounds (tree bounds)
667 {
668 return chkp_completed_bounds_set->contains (bounds);
669 }
670
671 /* Clear comleted bound marks. */
672 static void
673 chkp_erase_completed_bounds (void)
674 {
675 delete chkp_completed_bounds_set;
676 chkp_completed_bounds_set = new hash_set<tree>;
677 }
678
679 /* Mark BOUNDS associated with PTR as incomplete. */
680 static void
681 chkp_register_incomplete_bounds (tree bounds, tree ptr)
682 {
683 chkp_incomplete_bounds_map->put (bounds, ptr);
684
685 if (dump_file && (dump_flags & TDF_DETAILS))
686 {
687 fprintf (dump_file, "Regsitered incomplete bounds ");
688 print_generic_expr (dump_file, bounds, 0);
689 fprintf (dump_file, " for ");
690 print_generic_expr (dump_file, ptr, 0);
691 fprintf (dump_file, "\n");
692 }
693 }
694
695 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
696 static bool
697 chkp_incomplete_bounds (tree bounds)
698 {
699 if (bounds == incomplete_bounds)
700 return true;
701
702 if (chkp_completed_bounds (bounds))
703 return false;
704
705 return chkp_incomplete_bounds_map->get (bounds) != NULL;
706 }
707
708 /* Clear incomleted bound marks. */
709 static void
710 chkp_erase_incomplete_bounds (void)
711 {
712 delete chkp_incomplete_bounds_map;
713 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
714 }
715
716 /* Build and return bndmk call which creates bounds for structure
717 pointed by PTR. Structure should have complete type. */
718 tree
719 chkp_make_bounds_for_struct_addr (tree ptr)
720 {
721 tree type = TREE_TYPE (ptr);
722 tree size;
723
724 gcc_assert (POINTER_TYPE_P (type));
725
726 size = TYPE_SIZE (TREE_TYPE (type));
727
728 gcc_assert (size);
729
730 return build_call_nary (pointer_bounds_type_node,
731 build_fold_addr_expr (chkp_bndmk_fndecl),
732 2, ptr, size);
733 }
734
735 /* Traversal function for chkp_may_finish_incomplete_bounds.
736 Set RES to 0 if at least one argument of phi statement
737 defining bounds (passed in KEY arg) is unknown.
738 Traversal stops when first unknown phi argument is found. */
739 bool
740 chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
741 bool *res)
742 {
743 gimple *phi;
744 unsigned i;
745
746 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
747
748 phi = SSA_NAME_DEF_STMT (bounds);
749
750 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
751
752 for (i = 0; i < gimple_phi_num_args (phi); i++)
753 {
754 tree phi_arg = gimple_phi_arg_def (phi, i);
755 if (!phi_arg)
756 {
757 *res = false;
758 /* Do not need to traverse further. */
759 return false;
760 }
761 }
762
763 return true;
764 }
765
766 /* Return 1 if all phi nodes created for bounds have their
767 arguments computed. */
768 static bool
769 chkp_may_finish_incomplete_bounds (void)
770 {
771 bool res = true;
772
773 chkp_incomplete_bounds_map
774 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
775
776 return res;
777 }
778
779 /* Helper function for chkp_finish_incomplete_bounds.
780 Recompute args for bounds phi node. */
781 bool
782 chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
783 void *res ATTRIBUTE_UNUSED)
784 {
785 tree ptr = *slot;
786 gphi *bounds_phi;
787 gphi *ptr_phi;
788 unsigned i;
789
790 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
791 gcc_assert (TREE_CODE (ptr) == SSA_NAME);
792
793 bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
794 ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
795
796 for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
797 {
798 tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
799 tree bound_arg = chkp_find_bounds (ptr_arg, NULL);
800
801 add_phi_arg (bounds_phi, bound_arg,
802 gimple_phi_arg_edge (ptr_phi, i),
803 UNKNOWN_LOCATION);
804 }
805
806 return true;
807 }
808
809 /* Mark BOUNDS as invalid. */
810 static void
811 chkp_mark_invalid_bounds (tree bounds)
812 {
813 chkp_invalid_bounds->add (bounds);
814
815 if (dump_file && (dump_flags & TDF_DETAILS))
816 {
817 fprintf (dump_file, "Marked bounds ");
818 print_generic_expr (dump_file, bounds, 0);
819 fprintf (dump_file, " as invalid\n");
820 }
821 }
822
823 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
824 static bool
825 chkp_valid_bounds (tree bounds)
826 {
827 if (bounds == zero_bounds || bounds == none_bounds)
828 return false;
829
830 return !chkp_invalid_bounds->contains (bounds);
831 }
832
833 /* Helper function for chkp_finish_incomplete_bounds.
834 Check all arguments of phi nodes trying to find
835 valid completed bounds. If there is at least one
836 such arg then bounds produced by phi node are marked
837 as valid completed bounds and all phi args are
838 recomputed. */
839 bool
840 chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
841 {
842 gimple *phi;
843 unsigned i;
844
845 gcc_assert (TREE_CODE (bounds) == SSA_NAME);
846
847 if (chkp_completed_bounds (bounds))
848 return true;
849
850 phi = SSA_NAME_DEF_STMT (bounds);
851
852 gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);
853
854 for (i = 0; i < gimple_phi_num_args (phi); i++)
855 {
856 tree phi_arg = gimple_phi_arg_def (phi, i);
857
858 gcc_assert (phi_arg);
859
860 if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
861 {
862 *res = true;
863 chkp_mark_completed_bounds (bounds);
864 chkp_recompute_phi_bounds (bounds, slot, NULL);
865 return true;
866 }
867 }
868
869 return true;
870 }
871
872 /* Helper function for chkp_finish_incomplete_bounds.
873 Marks all incompleted bounds as invalid. */
874 bool
875 chkp_mark_invalid_bounds_walker (tree const &bounds,
876 tree *slot ATTRIBUTE_UNUSED,
877 void *res ATTRIBUTE_UNUSED)
878 {
879 if (!chkp_completed_bounds (bounds))
880 {
881 chkp_mark_invalid_bounds (bounds);
882 chkp_mark_completed_bounds (bounds);
883 }
884 return true;
885 }
886
887 /* When all bound phi nodes have all their args computed
888 we have enough info to find valid bounds. We iterate
889 through all incompleted bounds searching for valid
890 bounds. Found valid bounds are marked as completed
891 and all remaining incompleted bounds are recomputed.
892 Process continues until no new valid bounds may be
893 found. All remained incompleted bounds are marked as
894 invalid (i.e. have no valid source of bounds). */
895 static void
896 chkp_finish_incomplete_bounds (void)
897 {
898 bool found_valid;
899
900 while (found_valid)
901 {
902 found_valid = false;
903
904 chkp_incomplete_bounds_map->
905 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
906
907 if (found_valid)
908 chkp_incomplete_bounds_map->
909 traverse<void *, chkp_recompute_phi_bounds> (NULL);
910 }
911
912 chkp_incomplete_bounds_map->
913 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
914 chkp_incomplete_bounds_map->
915 traverse<void *, chkp_recompute_phi_bounds> (NULL);
916
917 chkp_erase_completed_bounds ();
918 chkp_erase_incomplete_bounds ();
919 }
920
921 /* Return 1 if type TYPE is a pointer type or a
922 structure having a pointer type as one of its fields.
923 Otherwise return 0. */
924 bool
925 chkp_type_has_pointer (const_tree type)
926 {
927 bool res = false;
928
929 if (BOUNDED_TYPE_P (type))
930 res = true;
931 else if (RECORD_OR_UNION_TYPE_P (type))
932 {
933 tree field;
934
935 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
936 if (TREE_CODE (field) == FIELD_DECL)
937 res = res || chkp_type_has_pointer (TREE_TYPE (field));
938 }
939 else if (TREE_CODE (type) == ARRAY_TYPE)
940 res = chkp_type_has_pointer (TREE_TYPE (type));
941
942 return res;
943 }
944
945 unsigned
946 chkp_type_bounds_count (const_tree type)
947 {
948 unsigned res = 0;
949
950 if (!type)
951 res = 0;
952 else if (BOUNDED_TYPE_P (type))
953 res = 1;
954 else if (RECORD_OR_UNION_TYPE_P (type))
955 {
956 bitmap have_bound;
957
958 bitmap_obstack_initialize (NULL);
959 have_bound = BITMAP_ALLOC (NULL);
960 chkp_find_bound_slots (type, have_bound);
961 res = bitmap_count_bits (have_bound);
962 BITMAP_FREE (have_bound);
963 bitmap_obstack_release (NULL);
964 }
965
966 return res;
967 }
968
969 /* Get bounds associated with NODE via
970 chkp_set_bounds call. */
971 tree
972 chkp_get_bounds (tree node)
973 {
974 tree *slot;
975
976 if (!chkp_bounds_map)
977 return NULL_TREE;
978
979 slot = chkp_bounds_map->get (node);
980 return slot ? *slot : NULL_TREE;
981 }
982
983 /* Associate bounds VAL with NODE. */
984 void
985 chkp_set_bounds (tree node, tree val)
986 {
987 if (!chkp_bounds_map)
988 chkp_bounds_map = new hash_map<tree, tree>;
989
990 chkp_bounds_map->put (node, val);
991 }
992
993 /* Check if statically initialized variable VAR require
994 static bounds initialization. If VAR is added into
995 bounds initlization list then 1 is returned. Otherwise
996 return 0. */
997 extern bool
998 chkp_register_var_initializer (tree var)
999 {
1000 if (!flag_check_pointer_bounds
1001 || DECL_INITIAL (var) == error_mark_node)
1002 return false;
1003
1004 gcc_assert (TREE_CODE (var) == VAR_DECL);
1005 gcc_assert (DECL_INITIAL (var));
1006
1007 if (TREE_STATIC (var)
1008 && chkp_type_has_pointer (TREE_TYPE (var)))
1009 {
1010 varpool_node::get_create (var)->need_bounds_init = 1;
1011 return true;
1012 }
1013
1014 return false;
1015 }
1016
1017 /* Helper function for chkp_finish_file.
1018
1019 Add new modification statement (RHS is assigned to LHS)
1020 into list of static initializer statementes (passed in ARG).
1021 If statements list becomes too big, emit checker constructor
1022 and start the new one. */
1023 static void
1024 chkp_add_modification_to_stmt_list (tree lhs,
1025 tree rhs,
1026 void *arg)
1027 {
1028 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1029 tree modify;
1030
1031 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1032 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1033
1034 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1035 append_to_statement_list (modify, &stmts->stmts);
1036
1037 stmts->avail--;
1038 }
1039
1040 /* Build and return ADDR_EXPR for specified object OBJ. */
1041 static tree
1042 chkp_build_addr_expr (tree obj)
1043 {
1044 return TREE_CODE (obj) == TARGET_MEM_REF
1045 ? tree_mem_ref_addr (ptr_type_node, obj)
1046 : build_fold_addr_expr (obj);
1047 }
1048
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constant: lower bound is its address; SIZE is chosen
	 so that UB = LB + SIZE points at the string's last byte.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Build a call to the runtime sizeof helper so the size of VAR
	 is obtained when the constructor runs.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the runtime reports a zero size, substitute the maximum
	     representable size (0 - LB in size_type arithmetic) so the
	     bounds effectively become infinite.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* UB is the address of the object's last valid byte.  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* When the statement budget is exhausted, flush the accumulated
     statements into a static constructor and start a fresh list.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1106
1107 /* Return entry block to be used for checker initilization code.
1108 Create new block if required. */
1109 static basic_block
1110 chkp_get_entry_block (void)
1111 {
1112 if (!entry_block)
1113 entry_block
1114 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1115
1116 return entry_block;
1117 }
1118
1119 /* Return a bounds var to be used for pointer var PTR_VAR. */
1120 static tree
1121 chkp_get_bounds_var (tree ptr_var)
1122 {
1123 tree bnd_var;
1124 tree *slot;
1125
1126 slot = chkp_bound_vars->get (ptr_var);
1127 if (slot)
1128 bnd_var = *slot;
1129 else
1130 {
1131 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1132 CHKP_BOUND_TMP_NAME);
1133 chkp_bound_vars->put (ptr_var, bnd_var);
1134 }
1135
1136 return bnd_var;
1137 }
1138
1139 /* If BND is an abnormal bounds copy, return a copied value.
1140 Otherwise return BND. */
1141 static tree
1142 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1143 {
1144 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1145 {
1146 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1147 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1148 bnd = gimple_assign_rhs1 (bnd_def);
1149 }
1150
1151 return bnd;
1152 }
1153
1154 /* Register bounds BND for object PTR in global bounds table.
1155 A copy of bounds may be created for abnormal ssa names.
1156 Returns bounds to use for PTR. */
1157 static tree
1158 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1159 {
1160 bool abnormal_ptr;
1161
1162 if (!chkp_reg_bounds)
1163 return bnd;
1164
1165 /* Do nothing if bounds are incomplete_bounds
1166 because it means bounds will be recomputed. */
1167 if (bnd == incomplete_bounds)
1168 return bnd;
1169
1170 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1171 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1172 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1173
1174 /* A single bounds value may be reused multiple times for
1175 different pointer values. It may cause coalescing issues
1176 for abnormal SSA names. To avoid it we create a bounds
1177 copy in case it is computed for abnormal SSA name.
1178
1179 We also cannot reuse such created copies for other pointers */
1180 if (abnormal_ptr
1181 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1182 {
1183 tree bnd_var = NULL_TREE;
1184
1185 if (abnormal_ptr)
1186 {
1187 if (SSA_NAME_VAR (ptr))
1188 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1189 }
1190 else
1191 bnd_var = chkp_get_tmp_var ();
1192
1193 /* For abnormal copies we may just find original
1194 bounds and use them. */
1195 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1196 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1197 /* For undefined values we usually use none bounds
1198 value but in case of abnormal edge it may cause
1199 coalescing failures. Use default definition of
1200 bounds variable instead to avoid it. */
1201 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1202 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1203 {
1204 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1205
1206 if (dump_file && (dump_flags & TDF_DETAILS))
1207 {
1208 fprintf (dump_file, "Using default def bounds ");
1209 print_generic_expr (dump_file, bnd, 0);
1210 fprintf (dump_file, " for abnormal default def SSA name ");
1211 print_generic_expr (dump_file, ptr, 0);
1212 fprintf (dump_file, "\n");
1213 }
1214 }
1215 else
1216 {
1217 tree copy;
1218 gimple *def = SSA_NAME_DEF_STMT (ptr);
1219 gimple *assign;
1220 gimple_stmt_iterator gsi;
1221
1222 if (bnd_var)
1223 copy = make_ssa_name (bnd_var);
1224 else
1225 copy = make_temp_ssa_name (pointer_bounds_type_node,
1226 NULL,
1227 CHKP_BOUND_TMP_NAME);
1228 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1229 assign = gimple_build_assign (copy, bnd);
1230
1231 if (dump_file && (dump_flags & TDF_DETAILS))
1232 {
1233 fprintf (dump_file, "Creating a copy of bounds ");
1234 print_generic_expr (dump_file, bnd, 0);
1235 fprintf (dump_file, " for abnormal SSA name ");
1236 print_generic_expr (dump_file, ptr, 0);
1237 fprintf (dump_file, "\n");
1238 }
1239
1240 if (gimple_code (def) == GIMPLE_NOP)
1241 {
1242 gsi = gsi_last_bb (chkp_get_entry_block ());
1243 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1244 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1245 else
1246 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1247 }
1248 else
1249 {
1250 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1251 /* Sometimes (e.g. when we load a pointer from a
1252 memory) bounds are produced later than a pointer.
1253 We need to insert bounds copy appropriately. */
1254 if (gimple_code (bnd_def) != GIMPLE_NOP
1255 && stmt_dominates_stmt_p (def, bnd_def))
1256 gsi = gsi_for_stmt (bnd_def);
1257 else
1258 gsi = gsi_for_stmt (def);
1259 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1260 }
1261
1262 bnd = copy;
1263 }
1264
1265 if (abnormal_ptr)
1266 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1267 }
1268
1269 chkp_reg_bounds->put (ptr, bnd);
1270
1271 if (dump_file && (dump_flags & TDF_DETAILS))
1272 {
1273 fprintf (dump_file, "Regsitered bound ");
1274 print_generic_expr (dump_file, bnd, 0);
1275 fprintf (dump_file, " for pointer ");
1276 print_generic_expr (dump_file, ptr, 0);
1277 fprintf (dump_file, "\n");
1278 }
1279
1280 return bnd;
1281 }
1282
1283 /* Get bounds registered for object PTR in global bounds table. */
1284 static tree
1285 chkp_get_registered_bounds (tree ptr)
1286 {
1287 tree *slot;
1288
1289 if (!chkp_reg_bounds)
1290 return NULL_TREE;
1291
1292 slot = chkp_reg_bounds->get (ptr);
1293 return slot ? *slot : NULL_TREE;
1294 }
1295
1296 /* Add bound retvals to return statement pointed by GSI. */
1297
1298 static void
1299 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1300 {
1301 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1302 tree retval = gimple_return_retval (ret);
1303 tree ret_decl = DECL_RESULT (cfun->decl);
1304 tree bounds;
1305
1306 if (!retval)
1307 return;
1308
1309 if (BOUNDED_P (ret_decl))
1310 {
1311 bounds = chkp_find_bounds (retval, gsi);
1312 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1313 gimple_return_set_retbnd (ret, bounds);
1314 }
1315
1316 update_stmt (ret);
1317 }
1318
1319 /* Force OP to be suitable for using as an argument for call.
1320 New statements (if any) go to SEQ. */
1321 static tree
1322 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1323 {
1324 gimple_seq stmts;
1325 gimple_stmt_iterator si;
1326
1327 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1328
1329 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1330 chkp_mark_stmt (gsi_stmt (si));
1331
1332 gimple_seq_add_seq (seq, stmts);
1333
1334 return op;
1335 }
1336
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Zero bounds never fail, so the check is pointless unless this
     function itself is instrumented.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG is integer_zero_node for reads and integer_one_node for
     writes; honor -fchkp-check-read / -fchkp-check-write.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the bndcl (lower-bound check) call.  */
  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1383
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Zero bounds never fail, so the check is pointless unless this
     function itself is instrumented.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG is integer_zero_node for reads and integer_one_node for
     writes; honor -fchkp-check-read / -fchkp-check-write.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the bndcu (upper-bound check) call.  */
  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1430
1431 /* Generate lower and upper bound checks for memory access
1432 to memory slot [FIRST, LAST] againsr BOUNDS. Checks
1433 are inserted before the position pointed by ITER.
1434 DIRFLAG indicates whether memory access is load or store. */
1435 void
1436 chkp_check_mem_access (tree first, tree last, tree bounds,
1437 gimple_stmt_iterator iter,
1438 location_t location,
1439 tree dirflag)
1440 {
1441 chkp_check_lower (first, bounds, iter, location, dirflag);
1442 chkp_check_upper (last, bounds, iter, location, dirflag);
1443 }
1444
1445 /* Replace call to _bnd_chk_* pointed by GSI with
1446 bndcu and bndcl calls. DIRFLAG determines whether
1447 check is for read or write. */
1448
1449 void
1450 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1451 tree dirflag)
1452 {
1453 gimple_stmt_iterator call_iter = *gsi;
1454 gimple *call = gsi_stmt (*gsi);
1455 tree fndecl = gimple_call_fndecl (call);
1456 tree addr = gimple_call_arg (call, 0);
1457 tree bounds = chkp_find_bounds (addr, gsi);
1458
1459 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1460 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1461 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1462
1463 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1464 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1465
1466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1467 {
1468 tree size = gimple_call_arg (call, 1);
1469 addr = fold_build_pointer_plus (addr, size);
1470 addr = fold_build_pointer_plus_hwi (addr, -1);
1471 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1472 }
1473
1474 gsi_remove (&call_iter, true);
1475 }
1476
1477 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1478 corresponding bounds extract call. */
1479
1480 void
1481 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1482 {
1483 gimple *call = gsi_stmt (*gsi);
1484 tree fndecl = gimple_call_fndecl (call);
1485 tree addr = gimple_call_arg (call, 0);
1486 tree bounds = chkp_find_bounds (addr, gsi);
1487 gimple *extract;
1488
1489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1490 fndecl = chkp_extract_lower_fndecl;
1491 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1492 fndecl = chkp_extract_upper_fndecl;
1493 else
1494 gcc_unreachable ();
1495
1496 extract = gimple_build_call (fndecl, 1, bounds);
1497 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1498 chkp_mark_stmt (extract);
1499
1500 gsi_replace (gsi, extract, false);
1501 }
1502
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      /* NOTE(review): only DECL_FIELD_OFFSET (the byte-aligned part
	 of the field position) is folded into the TMR offset;
	 presumably FIELD is never a bitfield here -- confirm against
	 callers.  */
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1529
/* Return ARRAY_REF for array ARR and index IDX with
   specified element type ETYPE and element size ESIZE.  */
static tree
chkp_build_array_ref (tree arr, tree etype, tree esize,
		      unsigned HOST_WIDE_INT idx)
{
  tree index = build_int_cst (size_type_node, idx);
  tree res;

  /* If object is TMR then we do not use array_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (arr) == TARGET_MEM_REF)
    {
      tree offs = TMR_OFFSET (arr);

      /* Scale the element size by the index to get the element's
	 offset from the start of the array.  NOTE(review): the
	 caller passes TYPE_SIZE (a bit size) as ESIZE while
	 TMR_OFFSET is a byte offset -- verify the units agree.  */
      esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
				       esize, index);
      gcc_assert(esize);

      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, esize);
      gcc_assert (offs);

      res = copy_node (arr);
      TREE_TYPE (res) = etype;
      TMR_OFFSET (res) = offs;
    }
  else
    res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);

  return res;
}
1563
1564 /* Helper function for chkp_add_bounds_to_call_stmt.
1565 Fill ALL_BOUNDS output array with created bounds.
1566
1567 OFFS is used for recursive calls and holds basic
1568 offset of TYPE in outer structure in bits.
1569
1570 ITER points a position where bounds are searched.
1571
1572 ALL_BOUNDS[i] is filled with elem bounds if there
1573 is a field in TYPE which has pointer type and offset
1574 equal to i * POINTER_SIZE in bits. */
1575 static void
1576 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1577 HOST_WIDE_INT offs,
1578 gimple_stmt_iterator *iter)
1579 {
1580 tree type = TREE_TYPE (elem);
1581
1582 if (BOUNDED_TYPE_P (type))
1583 {
1584 if (!all_bounds[offs / POINTER_SIZE])
1585 {
1586 tree temp = make_temp_ssa_name (type, NULL, "");
1587 gimple *assign = gimple_build_assign (temp, elem);
1588 gimple_stmt_iterator gsi;
1589
1590 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1591 gsi = gsi_for_stmt (assign);
1592
1593 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1594 }
1595 }
1596 else if (RECORD_OR_UNION_TYPE_P (type))
1597 {
1598 tree field;
1599
1600 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1601 if (TREE_CODE (field) == FIELD_DECL)
1602 {
1603 tree base = unshare_expr (elem);
1604 tree field_ref = chkp_build_component_ref (base, field);
1605 HOST_WIDE_INT field_offs
1606 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1607 if (DECL_FIELD_OFFSET (field))
1608 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1609
1610 chkp_find_bounds_for_elem (field_ref, all_bounds,
1611 offs + field_offs, iter);
1612 }
1613 }
1614 else if (TREE_CODE (type) == ARRAY_TYPE)
1615 {
1616 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1617 tree etype = TREE_TYPE (type);
1618 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1619 unsigned HOST_WIDE_INT cur;
1620
1621 if (!maxval || integer_minus_onep (maxval))
1622 return;
1623
1624 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1625 {
1626 tree base = unshare_expr (elem);
1627 tree arr_elem = chkp_build_array_ref (base, etype,
1628 TYPE_SIZE (etype),
1629 cur);
1630 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1631 iter);
1632 }
1633 }
1634 }
1635
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer-typed object occupies exactly one bound slot.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its bit offset within TYPE.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Ignore arrays without a statically known element count.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1683
1684 /* Fill bitmap RES with information about bounds for
1685 type TYPE. See chkp_find_bound_slots_1 for more
1686 details. */
1687 void
1688 chkp_find_bound_slots (const_tree type, bitmap res)
1689 {
1690 bitmap_clear (res);
1691 chkp_find_bound_slots_1 (type, res, 0);
1692 }
1693
1694 /* Return 1 if call to FNDECL should be instrumented
1695 and 0 otherwise. */
1696
1697 static bool
1698 chkp_instrument_normal_builtin (tree fndecl)
1699 {
1700 switch (DECL_FUNCTION_CODE (fndecl))
1701 {
1702 case BUILT_IN_STRLEN:
1703 case BUILT_IN_STRCPY:
1704 case BUILT_IN_STRNCPY:
1705 case BUILT_IN_STPCPY:
1706 case BUILT_IN_STPNCPY:
1707 case BUILT_IN_STRCAT:
1708 case BUILT_IN_STRNCAT:
1709 case BUILT_IN_MEMCPY:
1710 case BUILT_IN_MEMPCPY:
1711 case BUILT_IN_MEMSET:
1712 case BUILT_IN_MEMMOVE:
1713 case BUILT_IN_BZERO:
1714 case BUILT_IN_STRCMP:
1715 case BUILT_IN_STRNCMP:
1716 case BUILT_IN_BCMP:
1717 case BUILT_IN_MEMCMP:
1718 case BUILT_IN_MEMCPY_CHK:
1719 case BUILT_IN_MEMPCPY_CHK:
1720 case BUILT_IN_MEMMOVE_CHK:
1721 case BUILT_IN_MEMSET_CHK:
1722 case BUILT_IN_STRCPY_CHK:
1723 case BUILT_IN_STRNCPY_CHK:
1724 case BUILT_IN_STPCPY_CHK:
1725 case BUILT_IN_STPNCPY_CHK:
1726 case BUILT_IN_STRCAT_CHK:
1727 case BUILT_IN_STRNCAT_CHK:
1728 case BUILT_IN_MALLOC:
1729 case BUILT_IN_CALLOC:
1730 case BUILT_IN_REALLOC:
1731 return 1;
1732
1733 default:
1734 return 0;
1735 }
1736 }
1737
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  For the fntype case ARG walks a
	 TREE_LIST of types; for the decl case it walks PARM_DECLs.
	 Once formals run out (varargs), fall back to the actual
	 argument's type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A bounded argument (or one passed by invisible reference)
	 gets a single bounds argument right after it; an aggregate
	 containing pointers gets one bounds argument per contained
	 pointer slot.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, keep the original call statement.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA definitions of the old call at the new one
	 before replacing it in the statement stream.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1955
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      /* Reuse the existing symbol; VAR itself is discarded.  */
      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  /* Make the symbol one-only (COMDAT) so multiple translation units
     sharing the same bounds name do not clash at link time.  */
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2004
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Both operands have to be valid gimple call arguments; any
     statements needed to gimplify them are collected into SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Emit a bndmk call producing the new bounds value.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only matters when an explicit insertion point was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, " inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, " at function entry\n");
    }

  return bounds;
}
2057
2058 /* Return var holding zero bounds. */
2059 tree
2060 chkp_get_zero_bounds_var (void)
2061 {
2062 if (!chkp_zero_bounds_var)
2063 chkp_zero_bounds_var
2064 = chkp_make_static_const_bounds (0, -1,
2065 CHKP_ZERO_BOUNDS_VAR_NAME);
2066 return chkp_zero_bounds_var;
2067 }
2068
2069 /* Return var holding none bounds. */
2070 tree
2071 chkp_get_none_bounds_var (void)
2072 {
2073 if (!chkp_none_bounds_var)
2074 chkp_none_bounds_var
2075 = chkp_make_static_const_bounds (-1, 0,
2076 CHKP_NONE_BOUNDS_VAR_NAME);
2077 return chkp_none_bounds_var;
2078 }
2079
2080 /* Return SSA_NAME used to represent zero bounds. */
2081 static tree
2082 chkp_get_zero_bounds (void)
2083 {
2084 if (zero_bounds)
2085 return zero_bounds;
2086
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, "Creating zero bounds...");
2089
2090 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2091 || flag_chkp_use_static_const_bounds > 0)
2092 {
2093 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2094 gimple *stmt;
2095
2096 zero_bounds = chkp_get_tmp_reg (NULL);
2097 stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
2098 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2099 }
2100 else
2101 zero_bounds = chkp_make_bounds (integer_zero_node,
2102 integer_zero_node,
2103 NULL,
2104 false);
2105
2106 return zero_bounds;
2107 }
2108
/* Return SSA_NAME used to represent none bounds.  The value is
   created once per function and cached in NONE_BOUNDS.  */
static tree
chkp_get_none_bounds (void)
{
  /* Reuse the cached value if it was already created.  */
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Static const bounds are enabled: load none bounds from the
	 static var in the function's entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise build none bounds with lower bound -1 and size 2,
       which yields the always-failing [-1, 0] range.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2138
2139 /* Return bounds to be used as a result of operation which
2140 should not create poiunter (e.g. MULT_EXPR). */
2141 static tree
2142 chkp_get_invalid_op_bounds (void)
2143 {
2144 return chkp_get_zero_bounds ();
2145 }
2146
2147 /* Return bounds to be used for loads of non-pointer values. */
2148 static tree
2149 chkp_get_nonpointer_load_bounds (void)
2150 {
2151 return chkp_get_zero_bounds ();
2152 }
2153
/* Return 1 if may use bndret call to get bounds for pointer
   returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal function calls never return bounds.  */
  if (gimple_call_internal_p (call))
    return false;

  /* Narrowing builtins return (narrowed) bounds themselves.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Calls already marked as instrumented return bounds.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Machine-dependent builtins are never instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  /* A callee we may not instrument cannot return bounds.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      /* Normal builtins return bounds if they get an
	 instrumented version.  */
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* Non-instrumented builtins return bounds only when marked
	 always_inline and an instrumented clone with a body exists
	 (it will be inlined and instrumented).  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  /* For all remaining calls assume bounds are returned.  */
  return true;
}
2191
/* Build bounds returned by CALL.  Several special cases (alloca,
   chkp builtins, calls returning one of their arguments) are
   recognized before falling back to a generic __chkp_bndret call.
   Resulting bounds are registered for the call's LHS.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* Bounds of an alloca'ed block are its address and size.  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* In an instrumented call bounds args are interleaved with
	     regular args, so find the RETARG'th non-bounds arg.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    /* Callee cannot return bounds; use zero bounds.  */
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2298
2299 /* Return bounds used as returned by call
2300 which produced SSA name VAL. */
2301 gcall *
2302 chkp_retbnd_call_by_val (tree val)
2303 {
2304 if (TREE_CODE (val) != SSA_NAME)
2305 return NULL;
2306
2307 gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
2308
2309 imm_use_iterator use_iter;
2310 use_operand_p use_p;
2311 FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
2312 if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
2313 && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
2314 return as_a <gcall *> (USE_STMT (use_p));
2315
2316 return NULL;
2317 }
2318
2319 /* Check the next parameter for the given PARM is bounds
2320 and return it's default SSA_NAME (create if required). */
2321 static tree
2322 chkp_get_next_bounds_parm (tree parm)
2323 {
2324 tree bounds = TREE_CHAIN (parm);
2325 gcc_assert (POINTER_BOUNDS_P (bounds));
2326 bounds = ssa_default_def (cfun, bounds);
2327 if (!bounds)
2328 {
2329 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2330 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2331 }
2332 return bounds;
2333 }
2334
/* Return bounds to be used for input argument PARM.  PARM is an
   SSA name whose SSA_NAME_VAR is a PARM_DECL.  Bounds are either
   taken from the registry, or derived (zero bounds for the static
   chain and main's args, next-parm bounds for bounded params) and
   then registered for PARM.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Try bounds already registered for the SSA name, then for
     the underlying PARM_DECL.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Bounds for a bounded param come from the following
	     bounds parameter added by instrumentation.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	/* Non-pointer params get zero bounds.  */
	bounds = chkp_get_zero_bounds ();
    }

  /* Make sure the computed bounds are registered for PARM.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2398
2399 /* Build and return CALL_EXPR for bndstx builtin with specified
2400 arguments. */
2401 tree
2402 chkp_build_bndldx_call (tree addr, tree ptr)
2403 {
2404 tree fn = build1 (ADDR_EXPR,
2405 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2406 chkp_bndldx_fndecl);
2407 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2408 fn, 2, addr, ptr);
2409 CALL_WITH_BOUNDS_P (call) = true;
2410 return call;
2411 }
2412
/* Insert code to load bounds for PTR located by ADDR.
   Code is inserted after position pointed by GSI.
   Loaded bounds are returned.  */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;
  tree bounds;

  seq = NULL;

  /* Force both operands into a shape valid for a call argument;
     any generated statements are collected in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Build the bndldx call and give it a fresh bounds register
     as its result.  */
  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2447
2448 /* Build and return CALL_EXPR for bndstx builtin with specified
2449 arguments. */
2450 tree
2451 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2452 {
2453 tree fn = build1 (ADDR_EXPR,
2454 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2455 chkp_bndstx_fndecl);
2456 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2457 fn, 3, ptr, bounds, addr);
2458 CALL_WITH_BOUNDS_P (call) = true;
2459 return call;
2460 }
2461
/* Insert code to store BOUNDS for PTR stored by ADDR.
   New statements are inserted after position pointed
   by GSI.  */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple *stmt;

  seq = NULL;

  /* Force both operands into a shape valid for a call argument;
     any generated statements are collected in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2492
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.

   NODE may be NULL_TREE, in which case the bounds are computed
   but not registered.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* RHS operand whose bounds we reuse (if any); used at the end to
     detect reuse of bounds of an abnormal SSA name.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* The param's address is taken by bndldx.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   NB: MINUS_EXPR never takes its bounds from the second
	   operand (ptr - x is a pointer, x - ptr is not).  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	/* Remember which operand supplied the bounds.  */
	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Build a bounds COND_EXPR selected by the same
	       condition as the original assignment.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same comparison
	       MIN/MAX performs.  */
	    gimple *stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2715
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   For a PHI definition *ITER is set to point to the created bounds
   PHI node.  Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on the kind of decl the
	 SSA name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a usable constant size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* For abnormal PHIs tie the bounds var to the pointer's var
	 to avoid SSA coalescing issues.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Nothing is known about asm outputs; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2828
2829 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2830 tree
2831 chkp_build_make_bounds_call (tree lower_bound, tree size)
2832 {
2833 tree call = build1 (ADDR_EXPR,
2834 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2835 chkp_bndmk_fndecl);
2836 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2837 call, 2, lower_bound, size);
2838 }
2839
2840 /* Create static bounds var of specfified OBJ which is
2841 is either VAR_DECL or string constant. */
2842 static tree
2843 chkp_make_static_bounds (tree obj)
2844 {
2845 static int string_id = 1;
2846 static int var_id = 1;
2847 tree *slot;
2848 const char *var_name;
2849 char *bnd_var_name;
2850 tree bnd_var;
2851
2852 /* First check if we already have required var. */
2853 if (chkp_static_var_bounds)
2854 {
2855 /* For vars we use assembler name as a key in
2856 chkp_static_var_bounds map. It allows to
2857 avoid duplicating bound vars for decls
2858 sharing assembler name. */
2859 if (TREE_CODE (obj) == VAR_DECL)
2860 {
2861 tree name = DECL_ASSEMBLER_NAME (obj);
2862 slot = chkp_static_var_bounds->get (name);
2863 if (slot)
2864 return *slot;
2865 }
2866 else
2867 {
2868 slot = chkp_static_var_bounds->get (obj);
2869 if (slot)
2870 return *slot;
2871 }
2872 }
2873
2874 /* Build decl for bounds var. */
2875 if (TREE_CODE (obj) == VAR_DECL)
2876 {
2877 if (DECL_IGNORED_P (obj))
2878 {
2879 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2880 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2881 }
2882 else
2883 {
2884 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2885
2886 /* For hidden symbols we want to skip first '*' char. */
2887 if (*var_name == '*')
2888 var_name++;
2889
2890 bnd_var_name = (char *) xmalloc (strlen (var_name)
2891 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2892 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2893 strcat (bnd_var_name, var_name);
2894 }
2895
2896 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2897 get_identifier (bnd_var_name),
2898 pointer_bounds_type_node);
2899
2900 /* Address of the obj will be used as lower bound. */
2901 TREE_ADDRESSABLE (obj) = 1;
2902 }
2903 else
2904 {
2905 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2906 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2907
2908 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2909 get_identifier (bnd_var_name),
2910 pointer_bounds_type_node);
2911 }
2912
2913 TREE_PUBLIC (bnd_var) = 0;
2914 TREE_USED (bnd_var) = 1;
2915 TREE_READONLY (bnd_var) = 0;
2916 TREE_STATIC (bnd_var) = 1;
2917 TREE_ADDRESSABLE (bnd_var) = 0;
2918 DECL_ARTIFICIAL (bnd_var) = 1;
2919 DECL_COMMON (bnd_var) = 1;
2920 DECL_COMDAT (bnd_var) = 1;
2921 DECL_READ_P (bnd_var) = 1;
2922 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2923 /* Force output similar to constant bounds.
2924 See chkp_make_static_const_bounds. */
2925 varpool_node::get_create (bnd_var)->force_output = 1;
2926 /* Mark symbol as requiring bounds initialization. */
2927 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2928 varpool_node::finalize_decl (bnd_var);
2929
2930 /* Add created var to the map to use it for other references
2931 to obj. */
2932 if (!chkp_static_var_bounds)
2933 chkp_static_var_bounds = new hash_map<tree, tree>;
2934
2935 if (TREE_CODE (obj) == VAR_DECL)
2936 {
2937 tree name = DECL_ASSEMBLER_NAME (obj);
2938 chkp_static_var_bounds->put (name, bnd_var);
2939 }
2940 else
2941 chkp_static_var_bounds->put (obj, bnd_var);
2942
2943 return bnd_var;
2944 }
2945
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.
   Generated statements are placed into the entry block.
   Return the resulting bounds for VAR.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Call the size-of builtin; its result is resolved by the
     runtime via a size relocation.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = (size_reloc != 0) ? size_reloc : max_size.  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3006
3007 /* Return 1 if TYPE has fields with zero size or fields
3008 marked with chkp_variable_size attribute. */
3009 bool
3010 chkp_variable_size_type (tree type)
3011 {
3012 bool res = false;
3013 tree field;
3014
3015 if (RECORD_OR_UNION_TYPE_P (type))
3016 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3017 {
3018 if (TREE_CODE (field) == FIELD_DECL)
3019 res = res
3020 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3021 || chkp_variable_size_type (TREE_TYPE (field));
3022 }
3023 else
3024 res = !TYPE_SIZE (type)
3025 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3026 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3027
3028 return res;
3029 }
3030
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Reuse previously registered bounds if any.  */
  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Static/global var: load bounds from a statically
	 initialized bounds var in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size is unknown at compile time; determine it at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Known size: bounds are [&decl, &decl + size - 1].  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
3096
/* Compute and return bounds for constant string CST.  Bounds are
   registered for CST so repeated queries reuse the same value.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  /* Reuse previously registered bounds if any.  */
  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically initialized bounds var
	 in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Otherwise build bounds from the string's address and
	 length directly.  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
3134
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero (or missing) bounds do not constrain anything, so the
     other operand can be returned as-is.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple *stmt;
      tree bounds;

      seq = NULL;

      /* Build the intersection builtin call and give it a fresh
	 bounds register as its result.  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3185
3186 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3187 and 0 othersize. */
3188 static bool
3189 chkp_may_narrow_to_field (tree field)
3190 {
3191 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3192 && tree_to_uhwi (DECL_SIZE (field)) != 0
3193 && (!DECL_FIELD_OFFSET (field)
3194 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3195 && (!DECL_FIELD_BIT_OFFSET (field)
3196 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3197 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3198 && !chkp_variable_size_type (TREE_TYPE (field));
3199 }
3200
3201 /* Return 1 if bounds for FIELD should be narrowed to
3202 field's own size. */
3203 static bool
3204 chkp_narrow_bounds_for_field (tree field)
3205 {
3206 HOST_WIDE_INT offs;
3207 HOST_WIDE_INT bit_offs;
3208
3209 if (!chkp_may_narrow_to_field (field))
3210 return false;
3211
3212 /* Accesse to compiler generated fields should not cause
3213 bounds narrowing. */
3214 if (DECL_ARTIFICIAL (field))
3215 return false;
3216
3217 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3218 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3219
3220 return (flag_chkp_narrow_bounds
3221 && (flag_chkp_first_field_has_own_bounds
3222 || offs
3223 || bit_offs));
3224 }
3225
3226 /* Perform narrowing for BOUNDS using bounds computed for field
3227 access COMPONENT. ITER meaning is the same as for
3228 chkp_intersect_bounds. */
3229 static tree
3230 chkp_narrow_bounds_to_field (tree bounds, tree component,
3231 gimple_stmt_iterator *iter)
3232 {
3233 tree field = TREE_OPERAND (component, 1);
3234 tree size = DECL_SIZE_UNIT (field);
3235 tree field_ptr = chkp_build_addr_expr (component);
3236 tree field_bounds;
3237
3238 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3239
3240 return chkp_intersect_bounds (field_bounds, bounds, iter);
3241 }
3242
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   ITER, when not NULL, gives the position before which any
   bounds computation code is inserted.

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  /* NODE itself is the innermost reference; a bit field access is
     recognized from its FIELD_DECL.  */
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      /* A dereference can never be proven in-bounds statically.  */
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
	 bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Rule 1: narrow to the component enclosing the
		 leftmost array reference (may be NULL_TREE).  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  /* Rule 2: remember the rightmost narrowable component
	     seen before any array reference.  */
	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  /* When narrowing to innermost arrays, narrow eagerly at
	     each array-typed member and restart the search.  */
	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  /* If nothing produced bounds so far, fall back to the bounds of
     the outermost object.  */
  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3384
/* Compute and return bounds for address of OBJ.
   Results are cached via chkp_register_addr_bounds and reused on
   later calls.  ITER has the same meaning as for
   chkp_intersect_bounds.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  /* Reuse previously computed bounds for this address if any.  */
  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Parse the access with innermost_bounds set so the
	   resulting bounds are narrowed to the accessed member
	   when narrowing is enabled.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses get zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p carries the bounds of pointer P itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* A complex component shares bounds with the whole complex
	 object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3452
3453 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3454 to compute bounds if required. Computed bounds should be available at
3455 position pointed by ITER.
3456
3457 If PTR_SRC is NULL_TREE then pointer definition is identified.
3458
3459 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3460 PTR. If PTR is a any memory reference then ITER points to a statement
3461 after which bndldx will be inserterd. In both cases ITER will be updated
3462 to point to the inserted bndldx statement. */
3463
3464 static tree
3465 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3466 {
3467 tree addr = NULL_TREE;
3468 tree bounds = NULL_TREE;
3469
3470 if (!ptr_src)
3471 ptr_src = ptr;
3472
3473 bounds = chkp_get_registered_bounds (ptr_src);
3474
3475 if (bounds)
3476 return bounds;
3477
3478 switch (TREE_CODE (ptr_src))
3479 {
3480 case MEM_REF:
3481 case VAR_DECL:
3482 if (BOUNDED_P (ptr_src))
3483 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3484 bounds = chkp_get_zero_bounds ();
3485 else
3486 {
3487 addr = chkp_build_addr_expr (ptr_src);
3488 bounds = chkp_build_bndldx (addr, ptr, iter);
3489 }
3490 else
3491 bounds = chkp_get_nonpointer_load_bounds ();
3492 break;
3493
3494 case ARRAY_REF:
3495 case COMPONENT_REF:
3496 addr = get_base_address (ptr_src);
3497 if (DECL_P (addr)
3498 || TREE_CODE (addr) == MEM_REF
3499 || TREE_CODE (addr) == TARGET_MEM_REF)
3500 {
3501 if (BOUNDED_P (ptr_src))
3502 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3503 bounds = chkp_get_zero_bounds ();
3504 else
3505 {
3506 addr = chkp_build_addr_expr (ptr_src);
3507 bounds = chkp_build_bndldx (addr, ptr, iter);
3508 }
3509 else
3510 bounds = chkp_get_nonpointer_load_bounds ();
3511 }
3512 else
3513 {
3514 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3515 bounds = chkp_find_bounds (addr, iter);
3516 }
3517 break;
3518
3519 case PARM_DECL:
3520 gcc_unreachable ();
3521 bounds = chkp_get_bound_for_parm (ptr_src);
3522 break;
3523
3524 case TARGET_MEM_REF:
3525 addr = chkp_build_addr_expr (ptr_src);
3526 bounds = chkp_build_bndldx (addr, ptr, iter);
3527 break;
3528
3529 case SSA_NAME:
3530 bounds = chkp_get_registered_bounds (ptr_src);
3531 if (!bounds)
3532 {
3533 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3534 gphi_iterator phi_iter;
3535
3536 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3537
3538 gcc_assert (bounds);
3539
3540 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3541 {
3542 unsigned i;
3543
3544 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3545 {
3546 tree arg = gimple_phi_arg_def (def_phi, i);
3547 tree arg_bnd;
3548 gphi *phi_bnd;
3549
3550 arg_bnd = chkp_find_bounds (arg, NULL);
3551
3552 /* chkp_get_bounds_by_definition created new phi
3553 statement and phi_iter points to it.
3554
3555 Previous call to chkp_find_bounds could create
3556 new basic block and therefore change phi statement
3557 phi_iter points to. */
3558 phi_bnd = phi_iter.phi ();
3559
3560 add_phi_arg (phi_bnd, arg_bnd,
3561 gimple_phi_arg_edge (def_phi, i),
3562 UNKNOWN_LOCATION);
3563 }
3564
3565 /* If all bound phi nodes have their arg computed
3566 then we may finish its computation. See
3567 chkp_finish_incomplete_bounds for more details. */
3568 if (chkp_may_finish_incomplete_bounds ())
3569 chkp_finish_incomplete_bounds ();
3570 }
3571
3572 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3573 || chkp_incomplete_bounds (bounds));
3574 }
3575 break;
3576
3577 case ADDR_EXPR:
3578 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3579 break;
3580
3581 case INTEGER_CST:
3582 if (integer_zerop (ptr_src))
3583 bounds = chkp_get_none_bounds ();
3584 else
3585 bounds = chkp_get_invalid_op_bounds ();
3586 break;
3587
3588 default:
3589 if (dump_file && (dump_flags & TDF_DETAILS))
3590 {
3591 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3592 get_tree_code_name (TREE_CODE (ptr_src)));
3593 print_node (dump_file, "", ptr_src, 0);
3594 }
3595 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3596 get_tree_code_name (TREE_CODE (ptr_src)));
3597 }
3598
3599 if (!bounds)
3600 {
3601 if (dump_file && (dump_flags & TDF_DETAILS))
3602 {
3603 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3604 print_node (dump_file, "", ptr_src, 0);
3605 }
3606 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3607 }
3608
3609 return bounds;
3610 }
3611
3612 /* Normal case for bounds search without forced narrowing. */
3613 static tree
3614 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3615 {
3616 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3617 }
3618
3619 /* Search bounds for pointer PTR loaded from PTR_SRC
3620 by statement *ITER points to. */
3621 static tree
3622 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3623 {
3624 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3625 }
3626
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.

   ARG is passed through to HANDLER unchanged.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS itself is a pointer; hand it to the handler directly.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into each initialized field that contains
	     pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain aggregate copy: pair up LHS and RHS members
	   field by field.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* A RANGE_EXPR index initializes a run of
		     elements with the same value.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An INTEGER_CST index repositions CUR; without
		     an index the next element is initialized.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3725
3726 /* Add code to copy bounds for assignment of RHS to LHS.
3727 ARG is an iterator pointing ne code position. */
3728 static void
3729 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3730 {
3731 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3732 tree bounds = chkp_find_bounds (rhs, iter);
3733 tree addr = chkp_build_addr_expr(lhs);
3734
3735 chkp_build_bndstx (addr, rhs, bounds, iter);
3736 }
3737
/* Emit static bound initializers and size vars.  Builds 'P'
   constructors storing bounds for statically initialized pointers
   and 'B' constructors initializing static bounds vars, then frees
   the pass-global maps.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush into a new constructor once the per-constructor
	   statement budget is exhausted.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Emit whatever is left over.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is initialized with the address of
	   the variable it describes.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3805
3806 /* An instrumentation function which is called for each statement
3807 having memory access we want to instrument. It inserts check
3808 code and bounds copy code.
3809
3810 ITER points to statement to instrument.
3811
3812 NODE holds memory access in statement to check.
3813
3814 LOC holds the location information for statement.
3815
3816 DIRFLAGS determines whether access is read or write.
3817
3818 ACCESS_OFFS should be added to address used in NODE
3819 before check.
3820
3821 ACCESS_SIZE holds size of checked access.
3822
3823 SAFE indicates if NODE access is safe and should not be
3824 checked. */
3825 static void
3826 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3827 location_t loc, tree dirflag,
3828 tree access_offs, tree access_size,
3829 bool safe)
3830 {
3831 tree node_type = TREE_TYPE (node);
3832 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3833 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3834 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3835 tree ptr = NULL_TREE; /* a pointer used for dereference */
3836 tree bounds = NULL_TREE;
3837
3838 /* We do not need instrumentation for clobbers. */
3839 if (dirflag == integer_one_node
3840 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3841 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3842 return;
3843
3844 switch (TREE_CODE (node))
3845 {
3846 case ARRAY_REF:
3847 case COMPONENT_REF:
3848 {
3849 bool bitfield;
3850 tree elt;
3851
3852 if (safe)
3853 {
3854 /* We are not going to generate any checks, so do not
3855 generate bounds as well. */
3856 addr_first = chkp_build_addr_expr (node);
3857 break;
3858 }
3859
3860 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3861 &bitfield, &bounds, iter, false);
3862
3863 /* Break if there is no dereference and operation is safe. */
3864
3865 if (bitfield)
3866 {
3867 tree field = TREE_OPERAND (node, 1);
3868
3869 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3870 size = DECL_SIZE_UNIT (field);
3871
3872 if (elt)
3873 elt = chkp_build_addr_expr (elt);
3874 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3875 addr_first = fold_build_pointer_plus_loc (loc,
3876 addr_first,
3877 byte_position (field));
3878 }
3879 else
3880 addr_first = chkp_build_addr_expr (node);
3881 }
3882 break;
3883
3884 case INDIRECT_REF:
3885 ptr = TREE_OPERAND (node, 0);
3886 addr_first = ptr;
3887 break;
3888
3889 case MEM_REF:
3890 ptr = TREE_OPERAND (node, 0);
3891 addr_first = chkp_build_addr_expr (node);
3892 break;
3893
3894 case TARGET_MEM_REF:
3895 ptr = TMR_BASE (node);
3896 addr_first = chkp_build_addr_expr (node);
3897 break;
3898
3899 case ARRAY_RANGE_REF:
3900 printf("ARRAY_RANGE_REF\n");
3901 debug_gimple_stmt(gsi_stmt(*iter));
3902 debug_tree(node);
3903 gcc_unreachable ();
3904 break;
3905
3906 case BIT_FIELD_REF:
3907 {
3908 tree offs, rem, bpu;
3909
3910 gcc_assert (!access_offs);
3911 gcc_assert (!access_size);
3912
3913 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3914 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3915 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3916 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3917
3918 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3919 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3920 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3921 size = fold_convert (size_type_node, size);
3922
3923 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3924 dirflag, offs, size, safe);
3925 return;
3926 }
3927 break;
3928
3929 case VAR_DECL:
3930 case RESULT_DECL:
3931 case PARM_DECL:
3932 if (dirflag != integer_one_node
3933 || DECL_REGISTER (node))
3934 return;
3935
3936 safe = true;
3937 addr_first = chkp_build_addr_expr (node);
3938 break;
3939
3940 default:
3941 return;
3942 }
3943
3944 /* If addr_last was not computed then use (addr_first + size - 1)
3945 expression to compute it. */
3946 if (!addr_last)
3947 {
3948 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3949 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3950 }
3951
3952 /* Shift both first_addr and last_addr by access_offs if specified. */
3953 if (access_offs)
3954 {
3955 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3956 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3957 }
3958
3959 /* Generate bndcl/bndcu checks if memory access is not safe. */
3960 if (!safe)
3961 {
3962 gimple_stmt_iterator stmt_iter = *iter;
3963
3964 if (!bounds)
3965 bounds = chkp_find_bounds (ptr, iter);
3966
3967 chkp_check_mem_access (addr_first, addr_last, bounds,
3968 stmt_iter, loc, dirflag);
3969 }
3970
3971 /* We need to store bounds in case pointer is stored. */
3972 if (dirflag == integer_one_node
3973 && chkp_type_has_pointer (node_type)
3974 && flag_chkp_store_bounds)
3975 {
3976 gimple *stmt = gsi_stmt (*iter);
3977 tree rhs1 = gimple_assign_rhs1 (stmt);
3978 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3979
3980 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
3981 chkp_walk_pointer_assignments (node, rhs1, iter,
3982 chkp_copy_bounds_for_elem);
3983 else
3984 {
3985 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
3986 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
3987 }
3988 }
3989 }
3990
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* NOTE(review): ITER appears to have been advanced past ASSIGN by
     the insertions above, so we scan backwards until ASSIGN is
     reached again -- confirm against chkp_find_bounds_1's iterator
     update contract.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only checker helper calls are expected among the
	     freshly inserted statements.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  /* New edges inherit profile data from the inlined
	     call edge.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4029
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements following a block-ending statement are the
	   flaw: move them onto the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Detach each trailing statement and queue it for
	       insertion on the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4087
4088 /* Walker callback for chkp_replace_function_pointers. Replaces
4089 function pointer in the specified operand with pointer to the
4090 instrumented function version. */
4091 static tree
4092 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4093 void *data ATTRIBUTE_UNUSED)
4094 {
4095 if (TREE_CODE (*op) == FUNCTION_DECL
4096 && chkp_instrumentable_p (*op)
4097 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4098 /* For builtins we replace pointers only for selected
4099 function and functions having definitions. */
4100 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4101 && (chkp_instrument_normal_builtin (*op)
4102 || gimple_has_body_p (*op)))))
4103 {
4104 struct cgraph_node *node = cgraph_node::get_create (*op);
4105 struct cgraph_node *clone = NULL;
4106
4107 if (!node->instrumentation_clone)
4108 clone = chkp_maybe_create_clone (*op);
4109
4110 if (clone)
4111 *op = clone->decl;
4112 *walk_subtrees = 0;
4113 }
4114
4115 return NULL;
4116 }
4117
4118 /* This function searches for function pointers in statement
4119 pointed by GSI and replaces them with pointers to instrumented
4120 function versions. */
4121 static void
4122 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4123 {
4124 gimple *stmt = gsi_stmt (*gsi);
4125 /* For calls we want to walk call args only. */
4126 if (gimple_code (stmt) == GIMPLE_CALL)
4127 {
4128 unsigned i;
4129 for (i = 0; i < gimple_call_num_args (stmt); i++)
4130 walk_tree (gimple_call_arg_ptr (stmt, i),
4131 chkp_replace_function_pointer, NULL, NULL);
4132 }
4133 else
4134 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4135 }
4136
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* Statements of checker-generated static initializers are
     treated as safe (no checks emitted).  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Cache the successor: instrumentation may modify the chain
	 of blocks -- TODO confirm splitting behavior.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* Instrument the store (lhs, dirflag = 1) and up to
		 two loads (rhs operands, dirflag = 0).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      /* I was already advanced past S, so removing S here
		 keeps the iteration valid.  */
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      /* A pointer param: store its incoming bounds at the
		 param's address in the bounds table.  */
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* An aggregate param containing pointers: one bounds
		 parameter follows for each pointer slot.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4272
4273 /* Find init/null/copy_ptr_bounds calls and replace them
4274 with assignments. It should allow better code
4275 optimization. */
4276
4277 static void
4278 chkp_remove_useless_builtins ()
4279 {
4280 basic_block bb;
4281 gimple_stmt_iterator gsi;
4282
4283 FOR_EACH_BB_FN (bb, cfun)
4284 {
4285 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4286 {
4287 gimple *stmt = gsi_stmt (gsi);
4288 tree fndecl;
4289 enum built_in_function fcode;
4290
4291 /* Find builtins returning first arg and replace
4292 them with assignments. */
4293 if (gimple_code (stmt) == GIMPLE_CALL
4294 && (fndecl = gimple_call_fndecl (stmt))
4295 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4296 && (fcode = DECL_FUNCTION_CODE (fndecl))
4297 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4298 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4299 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4300 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4301 {
4302 tree res = gimple_call_arg (stmt, 0);
4303 update_call_from_tree (&gsi, res);
4304 stmt = gsi_stmt (gsi);
4305 update_stmt (stmt);
4306 }
4307 }
4308 }
4309 }
4310
/* Initialize pass.  Resets all per-function state used by the checker:
   clears instrumentation marks, (re)creates the bookkeeping maps and
   sets, resets cached bounds/vars, ensures the constant bounds symbols
   exist, and computes dominance info needed during instrumentation.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear any instrumentation marks possibly left on statements
     from a previous run of the pass.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* NOTE: containers deleted here are exactly the ones NOT deleted in
     chkp_fini (they persist across functions until replaced); the ones
     allocated without a preceding delete are freed by chkp_fini.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset lazily-created per-function caches.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4356
/* Finalize instrumentation pass.  Releases the per-function containers
   not kept alive across functions (see chkp_init for the split), frees
   dominance info and resets the cached bounds state.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  /* Only these four containers are freed here; chkp_reg_bounds,
     chkp_bound_vars and chkp_bounds_map survive until the next
     chkp_init deletes and re-creates them.  */
  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop per-function caches; they are recomputed by the next run.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4377
/* Main instrumentation pass function.  Runs the full pipeline in a
   fixed order: init state, instrument the current function, simplify
   now-useless checker builtins, mark the function as instrumented,
   repair the CFG, and tear state down.  Always returns 0 (no extra
   TODOs beyond the pass's todo_flags_finish).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  /* Turn init/null/copy/set_ptr_bounds builtins into assignments
     so later passes can optimize through them.  */
  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
4396
4397 /* Instrumentation pass gate. */
4398 static bool
4399 chkp_gate (void)
4400 {
4401 cgraph_node *node = cgraph_node::get (cfun->decl);
4402 return ((node != NULL
4403 && node->instrumentation_clone)
4404 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4405 }
4406
namespace {

/* Pass metadata for the "chkp" GIMPLE pass.  Requires SSA and CFG;
   verifies the IL and updates SSA form when it finishes.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass wrapper delegating gate/execute to the chkp_* functions above.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  /* Clonable so the pass manager can schedule multiple instances.  */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4449
4450 gimple_opt_pass *
4451 make_pass_chkp (gcc::context *ctxt)
4452 {
4453 return new pass_chkp (ctxt);
4454 }
4455
4456 #include "gt-tree-chkp.h"