decl.c (value_annotation_hasher::handle_cache_entry): Delete.
[gcc.git] / gcc / tree-chkp.c
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "alias.h"
25 #include "symtab.h"
26 #include "options.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stor-layout.h"
30 #include "varasm.h"
31 #include "target.h"
32 #include "tree-iterator.h"
33 #include "tree-cfg.h"
34 #include "langhooks.h"
35 #include "tree-pass.h"
36 #include "diagnostic.h"
37 #include "cfgloop.h"
38 #include "stringpool.h"
39 #include "tree-ssa-alias.h"
40 #include "tree-ssanames.h"
41 #include "tree-ssa-operands.h"
42 #include "tree-ssa-address.h"
43 #include "tree-ssa.h"
44 #include "predict.h"
45 #include "dominance.h"
46 #include "cfg.h"
47 #include "basic-block.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "gimple-expr.h"
50 #include "gimple.h"
51 #include "tree-phinodes.h"
52 #include "gimple-ssa.h"
53 #include "ssa-iterators.h"
54 #include "gimple-pretty-print.h"
55 #include "gimple-iterator.h"
56 #include "gimplify.h"
57 #include "gimplify-me.h"
58 #include "print-tree.h"
59 #include "tm.h"
60 #include "hard-reg-set.h"
61 #include "function.h"
62 #include "rtl.h"
63 #include "flags.h"
64 #include "insn-config.h"
65 #include "expmed.h"
66 #include "dojump.h"
67 #include "explow.h"
68 #include "calls.h"
69 #include "emit-rtl.h"
70 #include "stmt.h"
71 #include "expr.h"
72 #include "tree-ssa-propagate.h"
73 #include "gimple-fold.h"
74 #include "tree-chkp.h"
75 #include "gimple-walk.h"
76 #include "rtl.h" /* For MEM_P, assign_temp. */
77 #include "tree-dfa.h"
78 #include "ipa-ref.h"
79 #include "lto-streamer.h"
80 #include "cgraph.h"
81 #include "ipa-chkp.h"
82 #include "params.h"
83
84 /* Pointer Bounds Checker instruments code with memory checks to find
85 out-of-bounds memory accesses. Checks are performed by computing
86 bounds for each pointer and then comparing address of accessed
87 memory before pointer dereferencing.
88
89 1. Function clones.
90
91 See ipa-chkp.c.
92
93 2. Instrumentation.
94
95 There are few things to instrument:
96
97 a) Memory accesses - add checker calls to check address of accessed memory
98 against bounds of dereferenced pointer. Obviously safe memory
99 accesses like static variable access does not have to be instrumented
100 with checks.
101
102 Example:
103
104 val_2 = *p_1;
105
106 with 4 bytes access is transformed into:
107
108 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
109 D.1_4 = p_1 + 3;
110 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
111 val_2 = *p_1;
112
113 where __bound_tmp.1_3 are bounds computed for pointer p_1,
114 __builtin___chkp_bndcl is a lower bound check and
115 __builtin___chkp_bndcu is an upper bound check.
116
117 b) Pointer stores.
118
119 When pointer is stored in memory we need to store its bounds. To
120 achieve compatibility of instrumented code with regular codes
121 we have to keep data layout and store bounds in special bound tables
122 via special checker call. Implementation of bounds table may vary for
123 different platforms. It has to associate pointer value and its
124 location (it is required because we may have two equal pointers
125 with different bounds stored in different places) with bounds.
126 Another checker builtin allows to get bounds for specified pointer
127 loaded from specified location.
128
129 Example:
130
131 buf1[i_1] = &buf2;
132
133 is transformed into:
134
135 buf1[i_1] = &buf2;
136 D.1_2 = &buf1[i_1];
137 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
138
139 where __bound_tmp.1_2 are bounds of &buf2.
140
141 c) Static initialization.
142
143 The special case of pointer store is static pointer initialization.
144 Bounds initialization is performed in a few steps:
145 - register all static initializations in front-end using
146 chkp_register_var_initializer
147 - when file compilation finishes we create functions with special
148 attribute 'chkp ctor' and put explicit initialization code
149 (assignments) for all statically initialized pointers.
150 - when checker constructor is compiled checker pass adds required
151 bounds initialization for all statically initialized pointers
152 - since we do not actually need excess pointers initialization
153 in checker constructor we remove such assignments from them
154
155 d) Calls.
156
157 For each call in the code we add additional arguments to pass
158 bounds for pointer arguments. We determine type of call arguments
159 using arguments list from function declaration; if function
160 declaration is not available we use function type; otherwise
161 (e.g. for unnamed arguments) we use type of passed value. Function
162 declaration/type is replaced with the instrumented one.
163
164 Example:
165
166 val_1 = foo (&buf1, &buf2, &buf1, 0);
167
168 is translated into:
169
170 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
171 &buf1, __bound_tmp.1_2, 0);
172
173 e) Returns.
174
175 If function returns a pointer value we have to return bounds also.
176 A new operand was added for return statement to hold returned bounds.
177
178 Example:
179
180 return &_buf1;
181
182 is transformed into
183
184 return &_buf1, __bound_tmp.1_1;
185
186 3. Bounds computation.
187
188 Compiler is fully responsible for computing bounds to be used for each
189 memory access. The first step for bounds computation is to find the
190 origin of pointer dereferenced for memory access. Basing on pointer
191 origin we define a way to compute its bounds. There are just few
192 possible cases:
193
194 a) Pointer is returned by call.
195
196 In this case we use corresponding checker builtin method to obtain returned
197 bounds.
198
199 Example:
200
201 buf_1 = malloc (size_2);
202 foo (buf_1);
203
204 is translated into:
205
206 buf_1 = malloc (size_2);
207 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
208 foo (buf_1, __bound_tmp.1_3);
209
210 b) Pointer is an address of an object.
211
 212     In this case compiler tries to compute the object's size and create corresponding
213 bounds. If object has incomplete type then special checker builtin is used to
214 obtain its size at runtime.
215
216 Example:
217
218 foo ()
219 {
220 <unnamed type> __bound_tmp.3;
221 static int buf[100];
222
223 <bb 3>:
224 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
225
226 <bb 2>:
227 return &buf, __bound_tmp.3_2;
228 }
229
230 Example:
231
232 Address of an object 'extern int buf[]' with incomplete type is
233 returned.
234
235 foo ()
236 {
237 <unnamed type> __bound_tmp.4;
238 long unsigned int __size_tmp.3;
239
240 <bb 3>:
241 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
242 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
243
244 <bb 2>:
245 return &buf, __bound_tmp.4_3;
246 }
247
248 c) Pointer is the result of object narrowing.
249
250 It happens when we use pointer to an object to compute pointer to a part
251 of an object. E.g. we take pointer to a field of a structure. In this
252 case we perform bounds intersection using bounds of original object and
253 bounds of object's part (which are computed basing on its type).
254
255 There may be some debatable questions about when narrowing should occur
256 and when it should not. To avoid false bound violations in correct
257 programs we do not perform narrowing when address of an array element is
258 obtained (it has address of the whole array) and when address of the first
259 structure field is obtained (because it is guaranteed to be equal to
260 address of the whole structure and it is legal to cast it back to structure).
261
262 Default narrowing behavior may be changed using compiler flags.
263
264 Example:
265
266 In this example address of the second structure field is returned.
267
268 foo (struct A * p, __bounds_type __bounds_of_p)
269 {
270 <unnamed type> __bound_tmp.3;
271 int * _2;
272 int * _5;
273
274 <bb 2>:
275 _5 = &p_1(D)->second_field;
276 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
277 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
278 __bounds_of_p_3(D));
279 _2 = &p_1(D)->second_field;
280 return _2, __bound_tmp.3_8;
281 }
282
283 Example:
284
285 In this example address of the first field of array element is returned.
286
287 foo (struct A * p, __bounds_type __bounds_of_p, int i)
288 {
289 long unsigned int _3;
290 long unsigned int _4;
291 struct A * _6;
292 int * _7;
293
294 <bb 2>:
295 _3 = (long unsigned int) i_1(D);
296 _4 = _3 * 8;
297 _6 = p_5(D) + _4;
298 _7 = &_6->first_field;
299 return _7, __bounds_of_p_2(D);
300 }
301
302
303 d) Pointer is the result of pointer arithmetic or type cast.
304
305 In this case bounds of the base pointer are used. In case of binary
306 operation producing a pointer we are analyzing data flow further
307 looking for operand's bounds. One operand is considered as a base
308 if it has some valid bounds. If we fall into a case when none of
309 operands (or both of them) has valid bounds, a default bounds value
310 is used.
311
312 Trying to find out bounds for binary operations we may fall into
313 cyclic dependencies for pointers. To avoid infinite recursion all
314 walked phi nodes instantly obtain corresponding bounds but created
315 bounds are marked as incomplete. It helps us to stop DF walk during
316 bounds search.
317
318 When we reach pointer source, some args of incomplete bounds phi obtain
319 valid bounds and those values are propagated further through phi nodes.
320 If no valid bounds were found for phi node then we mark its result as
321 invalid bounds. Process stops when all incomplete bounds become either
322 valid or invalid and we are able to choose a pointer base.
323
324 e) Pointer is loaded from the memory.
325
326 In this case we just need to load bounds from the bounds table.
327
328 Example:
329
330 foo ()
331 {
332 <unnamed type> __bound_tmp.3;
333 static int * buf;
334 int * _2;
335
336 <bb 2>:
337 _2 = buf;
338 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
339 return _2, __bound_tmp.3_4;
340 }
341
342 */
343
344 typedef void (*assign_handler)(tree, tree, void *);
345
346 static tree chkp_get_zero_bounds ();
347 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
348 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
349 gimple_stmt_iterator *iter);
350 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
351 tree *elt, bool *safe,
352 bool *bitfield,
353 tree *bounds,
354 gimple_stmt_iterator *iter,
355 bool innermost_bounds);
356
357 #define chkp_bndldx_fndecl \
358 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
359 #define chkp_bndstx_fndecl \
360 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
361 #define chkp_checkl_fndecl \
362 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
363 #define chkp_checku_fndecl \
364 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
365 #define chkp_bndmk_fndecl \
366 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
367 #define chkp_ret_bnd_fndecl \
368 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
369 #define chkp_intersect_fndecl \
370 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
371 #define chkp_narrow_bounds_fndecl \
372 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
373 #define chkp_sizeof_fndecl \
374 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
375 #define chkp_extract_lower_fndecl \
376 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
377 #define chkp_extract_upper_fndecl \
378 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
379
380 static GTY (()) tree chkp_uintptr_type;
381
382 static GTY (()) tree chkp_zero_bounds_var;
383 static GTY (()) tree chkp_none_bounds_var;
384
385 static GTY (()) basic_block entry_block;
386 static GTY (()) tree zero_bounds;
387 static GTY (()) tree none_bounds;
388 static GTY (()) tree incomplete_bounds;
389 static GTY (()) tree tmp_var;
390 static GTY (()) tree size_tmp_var;
391 static GTY (()) bitmap chkp_abnormal_copies;
392
393 struct hash_set<tree> *chkp_invalid_bounds;
394 struct hash_set<tree> *chkp_completed_bounds_set;
395 struct hash_map<tree, tree> *chkp_reg_bounds;
396 struct hash_map<tree, tree> *chkp_bound_vars;
397 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
398 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
399 struct hash_map<tree, tree> *chkp_bounds_map;
400 struct hash_map<tree, tree> *chkp_static_var_bounds;
401
402 static bool in_chkp_pass;
403
404 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
405 #define CHKP_SIZE_TMP_NAME "__size_tmp"
406 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
407 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
408 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
409 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
410 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
411
412 /* Static checker constructors may become very large and their
413 compilation with optimization may take too much time.
414 Therefore we put a limit to number of statements in one
415 constructor. Tests with 100 000 statically initialized
416 pointers showed following compilation times on Sandy Bridge
417 server (used -O2):
418 limit 100 => ~18 sec.
419 limit 300 => ~22 sec.
420 limit 1000 => ~30 sec.
421 limit 3000 => ~49 sec.
422 limit 5000 => ~55 sec.
423 limit 10000 => ~76 sec.
424 limit 100000 => ~532 sec. */
425 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
426
/* Accumulator for statements emitted into a static bounds-initialization
   constructor.  When AVAIL drops to zero the accumulated statements are
   emitted as one constructor and a new list is started (see
   MAX_STMTS_IN_STATIC_CHKP_CTOR above).  */
struct chkp_ctor_stmt_list
{
  tree stmts;	/* Statement list being accumulated.  */
  int avail;	/* Remaining statement budget for this constructor.  */
};
432
433 /* Return 1 if function FNDECL is instrumented by Pointer
434 Bounds Checker. */
435 bool
436 chkp_function_instrumented_p (tree fndecl)
437 {
438 return fndecl
439 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
440 }
441
442 /* Mark function FNDECL as instrumented. */
443 void
444 chkp_function_mark_instrumented (tree fndecl)
445 {
446 if (chkp_function_instrumented_p (fndecl))
447 return;
448
449 DECL_ATTRIBUTES (fndecl)
450 = tree_cons (get_identifier ("chkp instrumented"), NULL,
451 DECL_ATTRIBUTES (fndecl));
452 }
453
454 /* Return true when STMT is builtin call to instrumentation function
455 corresponding to CODE. */
456
457 bool
458 chkp_gimple_call_builtin_p (gimple call,
459 enum built_in_function code)
460 {
461 tree fndecl;
462 if (is_gimple_call (call)
463 && (fndecl = targetm.builtin_chkp_function (code))
464 && gimple_call_fndecl (call) == fndecl)
465 return true;
466 return false;
467 }
468
469 /* Emit code to build zero bounds and return RTL holding
470 the result. */
471 rtx
472 chkp_expand_zero_bounds ()
473 {
474 tree zero_bnd;
475
476 if (flag_chkp_use_static_const_bounds)
477 zero_bnd = chkp_get_zero_bounds_var ();
478 else
479 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
480 integer_zero_node);
481 return expand_normal (zero_bnd);
482 }
483
484 /* Emit code to store zero bounds for PTR located at MEM. */
485 void
486 chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
487 {
488 tree zero_bnd, bnd, addr, bndstx;
489
490 if (flag_chkp_use_static_const_bounds)
491 zero_bnd = chkp_get_zero_bounds_var ();
492 else
493 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
494 integer_zero_node);
495 bnd = make_tree (pointer_bounds_type_node,
496 assign_temp (pointer_bounds_type_node, 0, 1));
497 addr = build1 (ADDR_EXPR,
498 build_pointer_type (TREE_TYPE (mem)), mem);
499 bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
500
501 expand_assignment (bnd, zero_bnd, false);
502 expand_normal (bndstx);
503 }
504
/* Build retbnd call for returned value RETVAL.

   If BNDVAL is not NULL then result is stored
   in it.  Otherwise a temporary is created to
   hold returned value.

   GSI points to a position for a retbnd call
   and is set to created stmt.

   Obtained bounds are returned.  */
tree
chkp_insert_retbnd_call (tree bndval, tree retval,
			 gimple_stmt_iterator *gsi)
{
  gimple call;

  if (!bndval)
    bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");

  /* bndval = __builtin___chkp_bndret (retval);  */
  call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  gimple_call_set_lhs (call, bndval);
  gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);

  return bndval;
}
533
/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  /* Collect the positions of all bounds arguments.  */
  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  /* Rebuild the call without the collected arguments; when there
     are none the original statement is kept as-is.  */
  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}
559
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  /* The callee counts as instrumented if it is an instrumentation
     clone or carries the "chkp instrumented" attribute.  */
  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Instrumented callee but the call carries no bounds: redirect
     to the original (non-instrumented) function.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  /* Non-instrumented callee but a call that carries bounds (and is
     not one of the checker builtins legitimately taking bounds).  */
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
598
/* Mark statement S to not be instrumented.  Uses the statement-local
   pass flag GF_PLF_1, tested by chkp_marked_stmt_p.  */
static void
chkp_mark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, true);
}
605
/* Mark statement S to be instrumented (clears the GF_PLF_1 flag
   set by chkp_mark_stmt).  */
static void
chkp_unmark_stmt (gimple s)
{
  gimple_set_plf (s, GF_PLF_1, false);
}
612
/* Return 1 if statement S should not be instrumented
   (was marked via chkp_mark_stmt).  */
static bool
chkp_marked_stmt_p (gimple s)
{
  return gimple_plf (s, GF_PLF_1);
}
619
/* Get var to be used for bound temps.  Created lazily and shared
   across the whole pass (fresh SSA names are made from it).  */
static tree
chkp_get_tmp_var (void)
{
  if (!tmp_var)
    tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);

  return tmp_var;
}
629
630 /* Get SSA_NAME to be used as temp. */
631 static tree
632 chkp_get_tmp_reg (gimple stmt)
633 {
634 if (in_chkp_pass)
635 return make_ssa_name (chkp_get_tmp_var (), stmt);
636
637 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
638 CHKP_BOUND_TMP_NAME);
639 }
640
/* Get var to be used for size temps.  Created lazily and shared
   across the whole pass.  */
static tree
chkp_get_size_tmp_var (void)
{
  if (!size_tmp_var)
    size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);

  return size_tmp_var;
}
650
651 /* Register bounds BND for address of OBJ. */
652 static void
653 chkp_register_addr_bounds (tree obj, tree bnd)
654 {
655 if (bnd == incomplete_bounds)
656 return;
657
658 chkp_reg_addr_bounds->put (obj, bnd);
659
660 if (dump_file && (dump_flags & TDF_DETAILS))
661 {
662 fprintf (dump_file, "Regsitered bound ");
663 print_generic_expr (dump_file, bnd, 0);
664 fprintf (dump_file, " for address of ");
665 print_generic_expr (dump_file, obj, 0);
666 fprintf (dump_file, "\n");
667 }
668 }
669
670 /* Return bounds registered for address of OBJ. */
671 static tree
672 chkp_get_registered_addr_bounds (tree obj)
673 {
674 tree *slot = chkp_reg_addr_bounds->get (obj);
675 return slot ? *slot : NULL_TREE;
676 }
677
/* Mark BOUNDS as completed (a valid source of bounds was found;
   see chkp_finish_incomplete_bounds).  */
static void
chkp_mark_completed_bounds (tree bounds)
{
  chkp_completed_bounds_set->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as completed\n");
    }
}
691
/* Return 1 if BOUNDS were marked as completed
   (via chkp_mark_completed_bounds) and 0 otherwise.  */
static bool
chkp_completed_bounds (tree bounds)
{
  return chkp_completed_bounds_set->contains (bounds);
}
698
/* Clear completed bound marks by recreating the set.  */
static void
chkp_erase_completed_bounds (void)
{
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
706
707 /* Mark BOUNDS associated with PTR as incomplete. */
708 static void
709 chkp_register_incomplete_bounds (tree bounds, tree ptr)
710 {
711 chkp_incomplete_bounds_map->put (bounds, ptr);
712
713 if (dump_file && (dump_flags & TDF_DETAILS))
714 {
715 fprintf (dump_file, "Regsitered incomplete bounds ");
716 print_generic_expr (dump_file, bounds, 0);
717 fprintf (dump_file, " for ");
718 print_generic_expr (dump_file, ptr, 0);
719 fprintf (dump_file, "\n");
720 }
721 }
722
723 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
724 static bool
725 chkp_incomplete_bounds (tree bounds)
726 {
727 if (bounds == incomplete_bounds)
728 return true;
729
730 if (chkp_completed_bounds (bounds))
731 return false;
732
733 return chkp_incomplete_bounds_map->get (bounds) != NULL;
734 }
735
/* Clear incomplete bound marks by recreating the map.  */
static void
chkp_erase_incomplete_bounds (void)
{
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
743
744 /* Build and return bndmk call which creates bounds for structure
745 pointed by PTR. Structure should have complete type. */
746 tree
747 chkp_make_bounds_for_struct_addr (tree ptr)
748 {
749 tree type = TREE_TYPE (ptr);
750 tree size;
751
752 gcc_assert (POINTER_TYPE_P (type));
753
754 size = TYPE_SIZE (TREE_TYPE (type));
755
756 gcc_assert (size);
757
758 return build_call_nary (pointer_bounds_type_node,
759 build_fold_addr_expr (chkp_bndmk_fndecl),
760 2, ptr, size);
761 }
762
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further: returning false
	     stops the hash-map traversal early.  */
	  return false;
	}
    }

  return true;
}
793
/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  /* chkp_may_complete_phi_bounds clears RES and aborts the walk
     at the first phi with a missing argument.  */
  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
806
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  BOUNDS is the phi result;
   *SLOT is the pointer SSA name whose phi node mirrors it.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* For each incoming pointer value look up its bounds and append
     them on the matching incoming edge of the bounds phi.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  return true;
}
836
/* Mark BOUNDS as invalid (no valid source of bounds exists;
   tested by chkp_valid_bounds).  */
static void
chkp_mark_invalid_bounds (tree bounds)
{
  chkp_invalid_bounds->add (bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marked bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " as invalid\n");
    }
}
850
851 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
852 static bool
853 chkp_valid_bounds (tree bounds)
854 {
855 if (bounds == zero_bounds || bounds == none_bounds)
856 return false;
857
858 return !chkp_invalid_bounds->contains (bounds);
859 }
860
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already resolved on a previous fixed-point iteration.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  /* Found a valid source: report progress to the caller,
	     mark these bounds completed, and refill their phi args
	     from the pointer phi.  */
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  return true;
}
899
/* Helper function for chkp_finish_incomplete_bounds.
   Marks all remaining incomplete bounds as invalid.  */
bool
chkp_mark_invalid_bounds_walker (tree const &bounds,
				 tree *slot ATTRIBUTE_UNUSED,
				 void *res ATTRIBUTE_UNUSED)
{
  if (!chkp_completed_bounds (bounds))
    {
      chkp_mark_invalid_bounds (bounds);
      /* Also mark completed so later walks skip these bounds.  */
      chkp_mark_completed_bounds (bounds);
    }
  return true;
}
914
915 /* When all bound phi nodes have all their args computed
916 we have enough info to find valid bounds. We iterate
917 through all incompleted bounds searching for valid
918 bounds. Found valid bounds are marked as completed
919 and all remaining incompleted bounds are recomputed.
920 Process continues until no new valid bounds may be
921 found. All remained incompleted bounds are marked as
922 invalid (i.e. have no valid source of bounds). */
923 static void
924 chkp_finish_incomplete_bounds (void)
925 {
926 bool found_valid;
927
928 while (found_valid)
929 {
930 found_valid = false;
931
932 chkp_incomplete_bounds_map->
933 traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
934
935 if (found_valid)
936 chkp_incomplete_bounds_map->
937 traverse<void *, chkp_recompute_phi_bounds> (NULL);
938 }
939
940 chkp_incomplete_bounds_map->
941 traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
942 chkp_incomplete_bounds_map->
943 traverse<void *, chkp_recompute_phi_bounds> (NULL);
944
945 chkp_erase_completed_bounds ();
946 chkp_erase_incomplete_bounds ();
947 }
948
949 /* Return 1 if type TYPE is a pointer type or a
950 structure having a pointer type as one of its fields.
951 Otherwise return 0. */
952 bool
953 chkp_type_has_pointer (const_tree type)
954 {
955 bool res = false;
956
957 if (BOUNDED_TYPE_P (type))
958 res = true;
959 else if (RECORD_OR_UNION_TYPE_P (type))
960 {
961 tree field;
962
963 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
964 if (TREE_CODE (field) == FIELD_DECL)
965 res = res || chkp_type_has_pointer (TREE_TYPE (field));
966 }
967 else if (TREE_CODE (type) == ARRAY_TYPE)
968 res = chkp_type_has_pointer (TREE_TYPE (type));
969
970 return res;
971 }
972
/* Return the number of bound slots required for a value of type
   TYPE: 1 for a pointer type, the number of pointer slots in the
   layout for a record/union, and 0 otherwise (or for NULL TYPE).  */
unsigned
chkp_type_bounds_count (const_tree type)
{
  unsigned res = 0;

  if (!type)
    res = 0;
  else if (BOUNDED_TYPE_P (type))
    res = 1;
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      bitmap have_bound;

      /* Count distinct bound slots found in the record layout.  */
      bitmap_obstack_initialize (NULL);
      have_bound = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (type, have_bound);
      res = bitmap_count_bits (have_bound);
      BITMAP_FREE (have_bound);
      bitmap_obstack_release (NULL);
    }

  return res;
}
996
997 /* Get bounds associated with NODE via
998 chkp_set_bounds call. */
999 tree
1000 chkp_get_bounds (tree node)
1001 {
1002 tree *slot;
1003
1004 if (!chkp_bounds_map)
1005 return NULL_TREE;
1006
1007 slot = chkp_bounds_map->get (node);
1008 return slot ? *slot : NULL_TREE;
1009 }
1010
/* Associate bounds VAL with NODE.  The map is created lazily
   on first use.  */
void
chkp_set_bounds (tree node, tree val)
{
  if (!chkp_bounds_map)
    chkp_bounds_map = new hash_map<tree, tree>;

  chkp_bounds_map->put (node, val);
}
1020
/* Check if statically initialized variable VAR require
   static bounds initialization.  If VAR is added into
   bounds initialization list then 1 is returned.  Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to do when the checker is disabled or the
     initializer is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need their
     bounds initialized by the checker constructor.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
1044
/* Helper function for chkp_finish_file.

   Add new modification statement (RHS is assigned to LHS)
   into list of static initializer statements (passed in ARG).
   Only the statement budget (ARG->avail) is decremented here;
   the caller emits the constructor when the budget runs out.  */
static void
chkp_add_modification_to_stmt_list (tree lhs,
				    tree rhs,
				    void *arg)
{
  struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  tree modify;

  /* Insert a conversion when assigning between incompatible types.  */
  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
    rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);

  modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  append_to_statement_list (modify, &stmts->stmts);

  stmts->avail--;
}
1067
1068 /* Build and return ADDR_EXPR for specified object OBJ. */
1069 static tree
1070 chkp_build_addr_expr (tree obj)
1071 {
1072 return TREE_CODE (obj) == TARGET_MEM_REF
1073 ? tree_mem_ref_addr (ptr_type_node, obj)
1074 : build_fold_addr_expr (obj);
1075 }
1076
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* NOTE(review): assumes TREE_STRING_LENGTH counts the trailing
	 NUL, so -1 yields the offset of the last byte — confirm.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* size = __chkp_sizeof (var) evaluated at runtime.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* Treat a zero dynamic size as "unknown": substitute the
	     maximal size reaching the end of the address space
	     (0 - lb in unsigned arithmetic).  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is the address of the last byte: lb + (size - 1).  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Budget exhausted: emit the accumulated statements as a static
     constructor and start a fresh list.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1134
1135 /* Return entry block to be used for checker initilization code.
1136 Create new block if required. */
1137 static basic_block
1138 chkp_get_entry_block (void)
1139 {
1140 if (!entry_block)
1141 entry_block
1142 = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;
1143
1144 return entry_block;
1145 }
1146
1147 /* Return a bounds var to be used for pointer var PTR_VAR. */
1148 static tree
1149 chkp_get_bounds_var (tree ptr_var)
1150 {
1151 tree bnd_var;
1152 tree *slot;
1153
1154 slot = chkp_bound_vars->get (ptr_var);
1155 if (slot)
1156 bnd_var = *slot;
1157 else
1158 {
1159 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1160 CHKP_BOUND_TMP_NAME);
1161 chkp_bound_vars->put (ptr_var, bnd_var);
1162 }
1163
1164 return bnd_var;
1165 }
1166
/* If BND is an abnormal bounds copy, return a copied value.
   Otherwise return BND.
   NOTE: "orginal" in the name is a historical typo; kept because
   callers reference this spelling.  */
static tree
chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
{
  /* Abnormal copies are recorded in chkp_abnormal_copies by SSA
     version; each is defined by a plain assignment whose RHS is
     the original bounds value.  */
  if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
    {
      gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
      gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
      bnd = gimple_assign_rhs1 (bnd_def);
    }

  return bnd;
}
1181
1182 /* Register bounds BND for object PTR in global bounds table.
1183 A copy of bounds may be created for abnormal ssa names.
1184 Returns bounds to use for PTR. */
1185 static tree
1186 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1187 {
1188 bool abnormal_ptr;
1189
1190 if (!chkp_reg_bounds)
1191 return bnd;
1192
1193 /* Do nothing if bounds are incomplete_bounds
1194 because it means bounds will be recomputed. */
1195 if (bnd == incomplete_bounds)
1196 return bnd;
1197
1198 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1199 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1200 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1201
1202 /* A single bounds value may be reused multiple times for
1203 different pointer values. It may cause coalescing issues
1204 for abnormal SSA names. To avoid it we create a bounds
1205 copy in case it is computed for abnormal SSA name.
1206
1207 We also cannot reuse such created copies for other pointers */
1208 if (abnormal_ptr
1209 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1210 {
1211 tree bnd_var = NULL_TREE;
1212
1213 if (abnormal_ptr)
1214 {
1215 if (SSA_NAME_VAR (ptr))
1216 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1217 }
1218 else
1219 bnd_var = chkp_get_tmp_var ();
1220
1221 /* For abnormal copies we may just find original
1222 bounds and use them. */
1223 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1224 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1225 /* For undefined values we usually use none bounds
1226 value but in case of abnormal edge it may cause
1227 coalescing failures. Use default definition of
1228 bounds variable instead to avoid it. */
1229 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1230 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1231 {
1232 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1233
1234 if (dump_file && (dump_flags & TDF_DETAILS))
1235 {
1236 fprintf (dump_file, "Using default def bounds ");
1237 print_generic_expr (dump_file, bnd, 0);
1238 fprintf (dump_file, " for abnormal default def SSA name ");
1239 print_generic_expr (dump_file, ptr, 0);
1240 fprintf (dump_file, "\n");
1241 }
1242 }
1243 else
1244 {
1245 tree copy;
1246 gimple def = SSA_NAME_DEF_STMT (ptr);
1247 gimple assign;
1248 gimple_stmt_iterator gsi;
1249
1250 if (bnd_var)
1251 copy = make_ssa_name (bnd_var);
1252 else
1253 copy = make_temp_ssa_name (pointer_bounds_type_node,
1254 NULL,
1255 CHKP_BOUND_TMP_NAME);
1256 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1257 assign = gimple_build_assign (copy, bnd);
1258
1259 if (dump_file && (dump_flags & TDF_DETAILS))
1260 {
1261 fprintf (dump_file, "Creating a copy of bounds ");
1262 print_generic_expr (dump_file, bnd, 0);
1263 fprintf (dump_file, " for abnormal SSA name ");
1264 print_generic_expr (dump_file, ptr, 0);
1265 fprintf (dump_file, "\n");
1266 }
1267
1268 if (gimple_code (def) == GIMPLE_NOP)
1269 {
1270 gsi = gsi_last_bb (chkp_get_entry_block ());
1271 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1272 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1273 else
1274 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1275 }
1276 else
1277 {
1278 gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
1279 /* Sometimes (e.g. when we load a pointer from a
1280 memory) bounds are produced later than a pointer.
1281 We need to insert bounds copy appropriately. */
1282 if (gimple_code (bnd_def) != GIMPLE_NOP
1283 && stmt_dominates_stmt_p (def, bnd_def))
1284 gsi = gsi_for_stmt (bnd_def);
1285 else
1286 gsi = gsi_for_stmt (def);
1287 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1288 }
1289
1290 bnd = copy;
1291 }
1292
1293 if (abnormal_ptr)
1294 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1295 }
1296
1297 chkp_reg_bounds->put (ptr, bnd);
1298
1299 if (dump_file && (dump_flags & TDF_DETAILS))
1300 {
1301 fprintf (dump_file, "Regsitered bound ");
1302 print_generic_expr (dump_file, bnd, 0);
1303 fprintf (dump_file, " for pointer ");
1304 print_generic_expr (dump_file, ptr, 0);
1305 fprintf (dump_file, "\n");
1306 }
1307
1308 return bnd;
1309 }
1310
1311 /* Get bounds registered for object PTR in global bounds table. */
1312 static tree
1313 chkp_get_registered_bounds (tree ptr)
1314 {
1315 tree *slot;
1316
1317 if (!chkp_reg_bounds)
1318 return NULL_TREE;
1319
1320 slot = chkp_reg_bounds->get (ptr);
1321 return slot ? *slot : NULL_TREE;
1322 }
1323
/* Add bound retvals to return statement pointed by GSI.  */

static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for "return;" with no value.  */
  if (!retval)
    return;

  /* Attach bounds only when the function's result is a bounded
     (pointer-carrying) value.  */
  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
1346
1347 /* Force OP to be suitable for using as an argument for call.
1348 New statements (if any) go to SEQ. */
1349 static tree
1350 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1351 {
1352 gimple_seq stmts;
1353 gimple_stmt_iterator si;
1354
1355 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1356
1357 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1358 chkp_mark_stmt (gsi_stmt (si));
1359
1360 gimple_seq_add_seq (seq, stmts);
1361
1362 return op;
1363 }
1364
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load
   (integer_zero_node) or store (integer_one_node).  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds ([0, -1], i.e. the whole address space) can never
     be violated; skip the check in not instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the flags disabling read/write checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* ADDR may be an arbitrary expression; gimplify it into a valid
     call operand first.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1411
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load
   (integer_zero_node) or store (integer_one_node).
   Mirrors chkp_check_lower but emits a bndcu-style check.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Zero bounds can never be violated; skip the check in not
     instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor the flags disabling read/write checks.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Gimplify ADDR into a valid call operand.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1458
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* Lower check on the first byte, upper check on the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1472
1473 /* Replace call to _bnd_chk_* pointed by GSI with
1474 bndcu and bndcl calls. DIRFLAG determines whether
1475 check is for read or write. */
1476
1477 void
1478 chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
1479 tree dirflag)
1480 {
1481 gimple_stmt_iterator call_iter = *gsi;
1482 gimple call = gsi_stmt (*gsi);
1483 tree fndecl = gimple_call_fndecl (call);
1484 tree addr = gimple_call_arg (call, 0);
1485 tree bounds = chkp_find_bounds (addr, gsi);
1486
1487 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
1488 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1489 chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);
1490
1491 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
1492 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1493
1494 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
1495 {
1496 tree size = gimple_call_arg (call, 1);
1497 addr = fold_build_pointer_plus (addr, size);
1498 addr = fold_build_pointer_plus_hwi (addr, -1);
1499 chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
1500 }
1501
1502 gsi_remove (&call_iter, true);
1503 }
1504
1505 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1506 corresponding bounds extract call. */
1507
1508 void
1509 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1510 {
1511 gimple call = gsi_stmt (*gsi);
1512 tree fndecl = gimple_call_fndecl (call);
1513 tree addr = gimple_call_arg (call, 0);
1514 tree bounds = chkp_find_bounds (addr, gsi);
1515 gimple extract;
1516
1517 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1518 fndecl = chkp_extract_lower_fndecl;
1519 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1520 fndecl = chkp_extract_upper_fndecl;
1521 else
1522 gcc_unreachable ();
1523
1524 extract = gimple_build_call (fndecl, 1, bounds);
1525 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1526 chkp_mark_stmt (extract);
1527
1528 gsi_replace (gsi, extract, false);
1529 }
1530
/* Return COMPONENT_REF accessing FIELD in OBJ.  */
static tree
chkp_build_component_ref (tree obj, tree field)
{
  tree res;

  /* If object is TMR then we do not use component_ref but
     add offset instead.  We need it to be able to get addr
     of the result later.  */
  if (TREE_CODE (obj) == TARGET_MEM_REF)
    {
      /* Fold FIELD's byte offset into the TMR offset.  */
      tree offs = TMR_OFFSET (obj);
      offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
				      offs, DECL_FIELD_OFFSET (field));

      gcc_assert (offs);

      res = copy_node (obj);
      TREE_TYPE (res) = TREE_TYPE (field);
      TMR_OFFSET (res) = offs;
    }
  else
    res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);

  return res;
}
1557
1558 /* Return ARRAY_REF for array ARR and index IDX with
1559 specified element type ETYPE and element size ESIZE. */
1560 static tree
1561 chkp_build_array_ref (tree arr, tree etype, tree esize,
1562 unsigned HOST_WIDE_INT idx)
1563 {
1564 tree index = build_int_cst (size_type_node, idx);
1565 tree res;
1566
1567 /* If object is TMR then we do not use array_ref but
1568 add offset instead. We need it to be able to get addr
1569 of the reasult later. */
1570 if (TREE_CODE (arr) == TARGET_MEM_REF)
1571 {
1572 tree offs = TMR_OFFSET (arr);
1573
1574 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1575 esize, index);
1576 gcc_assert(esize);
1577
1578 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1579 offs, esize);
1580 gcc_assert (offs);
1581
1582 res = copy_node (arr);
1583 TREE_TYPE (res) = etype;
1584 TMR_OFFSET (res) = offs;
1585 }
1586 else
1587 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1588
1589 return res;
1590 }
1591
1592 /* Helper function for chkp_add_bounds_to_call_stmt.
1593 Fill ALL_BOUNDS output array with created bounds.
1594
1595 OFFS is used for recursive calls and holds basic
1596 offset of TYPE in outer structure in bits.
1597
1598 ITER points a position where bounds are searched.
1599
1600 ALL_BOUNDS[i] is filled with elem bounds if there
1601 is a field in TYPE which has pointer type and offset
1602 equal to i * POINTER_SIZE in bits. */
1603 static void
1604 chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
1605 HOST_WIDE_INT offs,
1606 gimple_stmt_iterator *iter)
1607 {
1608 tree type = TREE_TYPE (elem);
1609
1610 if (BOUNDED_TYPE_P (type))
1611 {
1612 if (!all_bounds[offs / POINTER_SIZE])
1613 {
1614 tree temp = make_temp_ssa_name (type, NULL, "");
1615 gimple assign = gimple_build_assign (temp, elem);
1616 gimple_stmt_iterator gsi;
1617
1618 gsi_insert_before (iter, assign, GSI_SAME_STMT);
1619 gsi = gsi_for_stmt (assign);
1620
1621 all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
1622 }
1623 }
1624 else if (RECORD_OR_UNION_TYPE_P (type))
1625 {
1626 tree field;
1627
1628 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1629 if (TREE_CODE (field) == FIELD_DECL)
1630 {
1631 tree base = unshare_expr (elem);
1632 tree field_ref = chkp_build_component_ref (base, field);
1633 HOST_WIDE_INT field_offs
1634 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
1635 if (DECL_FIELD_OFFSET (field))
1636 field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
1637
1638 chkp_find_bounds_for_elem (field_ref, all_bounds,
1639 offs + field_offs, iter);
1640 }
1641 }
1642 else if (TREE_CODE (type) == ARRAY_TYPE)
1643 {
1644 tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1645 tree etype = TREE_TYPE (type);
1646 HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
1647 unsigned HOST_WIDE_INT cur;
1648
1649 if (!maxval || integer_minus_onep (maxval))
1650 return;
1651
1652 for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
1653 {
1654 tree base = unshare_expr (elem);
1655 tree arr_elem = chkp_build_array_ref (base, etype,
1656 TYPE_SIZE (etype),
1657 cur);
1658 chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
1659 iter);
1660 }
1661 }
1662 }
1663
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* Pointer-sized slot at OFFS needs bounds.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Ignore empty arrays and arrays without a constant number
	 of elements.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1710
1711 /* Fill bitmap RES with information about bounds for
1712 type TYPE. See chkp_find_bound_slots_1 for more
1713 details. */
1714 void
1715 chkp_find_bound_slots (const_tree type, bitmap res)
1716 {
1717 bitmap_clear (res);
1718 chkp_find_bound_slots_1 (type, res, 0);
1719 }
1720
1721 /* Return 1 if call to FNDECL should be instrumented
1722 and 0 otherwise. */
1723
1724 static bool
1725 chkp_instrument_normal_builtin (tree fndecl)
1726 {
1727 switch (DECL_FUNCTION_CODE (fndecl))
1728 {
1729 case BUILT_IN_STRLEN:
1730 case BUILT_IN_STRCPY:
1731 case BUILT_IN_STRNCPY:
1732 case BUILT_IN_STPCPY:
1733 case BUILT_IN_STPNCPY:
1734 case BUILT_IN_STRCAT:
1735 case BUILT_IN_STRNCAT:
1736 case BUILT_IN_MEMCPY:
1737 case BUILT_IN_MEMPCPY:
1738 case BUILT_IN_MEMSET:
1739 case BUILT_IN_MEMMOVE:
1740 case BUILT_IN_BZERO:
1741 case BUILT_IN_STRCMP:
1742 case BUILT_IN_STRNCMP:
1743 case BUILT_IN_BCMP:
1744 case BUILT_IN_MEMCMP:
1745 case BUILT_IN_MEMCPY_CHK:
1746 case BUILT_IN_MEMPCPY_CHK:
1747 case BUILT_IN_MEMMOVE_CHK:
1748 case BUILT_IN_MEMSET_CHK:
1749 case BUILT_IN_STRCPY_CHK:
1750 case BUILT_IN_STRNCPY_CHK:
1751 case BUILT_IN_STPCPY_CHK:
1752 case BUILT_IN_STPNCPY_CHK:
1753 case BUILT_IN_STRCAT_CHK:
1754 case BUILT_IN_STRNCAT_CHK:
1755 case BUILT_IN_MALLOC:
1756 case BUILT_IN_CALLOC:
1757 case BUILT_IN_REALLOC:
1758 return 1;
1759
1760 default:
1761 return 0;
1762 }
1763 }
1764
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  Each original argument is pushed,
     immediately followed by its bounds argument(s) when the argument
     type requires them.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	/* A single bounds value follows a pointer (or by-reference)
	   argument.  */
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  /* An aggregate with embedded pointers gets one bounds
	     argument per pointer-sized slot that holds a pointer.  */
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* Rebuild the call only when bounds arguments were actually
     added.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* Replace old call statement with the new one; re-point SSA
     definitions of the old call at the new statement first.  */
  if (call != new_call)
    {
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1982
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer encodes [LB, UB] in a target-specific way.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2031
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Gimplify LB and SIZE into valid call operands.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bndmk call producing the bounds value.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only matters when an insertion point was supplied.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2084
2085 /* Return var holding zero bounds. */
2086 tree
2087 chkp_get_zero_bounds_var (void)
2088 {
2089 if (!chkp_zero_bounds_var)
2090 chkp_zero_bounds_var
2091 = chkp_make_static_const_bounds (0, -1,
2092 CHKP_ZERO_BOUNDS_VAR_NAME);
2093 return chkp_zero_bounds_var;
2094 }
2095
2096 /* Return var holding none bounds. */
2097 tree
2098 chkp_get_none_bounds_var (void)
2099 {
2100 if (!chkp_none_bounds_var)
2101 chkp_none_bounds_var
2102 = chkp_make_static_const_bounds (-1, 0,
2103 CHKP_NONE_BOUNDS_VAR_NAME);
2104 return chkp_none_bounds_var;
2105 }
2106
/* Return SSA_NAME used to represent zero bounds.  */
static tree
chkp_get_zero_bounds (void)
{
  /* Cached per-function; created on first use.  */
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load the value from the static constant bounds variable in
	 the entry block.  */
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise materialize the bounds with a bndmk of [0, 0).  */
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2135
2136 /* Return SSA_NAME used to represent none bounds. */
static tree
chkp_get_none_bounds (void)
{
  /* Result is cached per function in NONE_BOUNDS.  */
  if (none_bounds)
    return none_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating none bounds...");


  /* When static bounds vars may be used, load the value of the
     static none bounds var at function entry (mirrors
     chkp_get_zero_bounds).  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      none_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    /* Otherwise build bndmk with lb = -1 and size = 2; presumably this
       yields the [-1, 0] none bounds also used by
       chkp_get_none_bounds_var (which passes (-1, 0)) — confirm the
       bndmk size convention.  */
    none_bounds = chkp_make_bounds (integer_minus_one_node,
				    build_int_cst (size_type_node, 2),
				    NULL,
				    false);

  return none_bounds;
}
2165
/* Return bounds to be used as a result of operation which
   should not create pointer (e.g. MULT_EXPR).  */
2168 static tree
2169 chkp_get_invalid_op_bounds (void)
2170 {
2171 return chkp_get_zero_bounds ();
2172 }
2173
2174 /* Return bounds to be used for loads of non-pointer values. */
2175 static tree
2176 chkp_get_nonpointer_load_bounds (void)
2177 {
2178 return chkp_get_zero_bounds ();
2179 }
2180
/* Return 1 if we may use bndret call to get bounds for pointer
   returned by CALL.  */
2183 static bool
2184 chkp_call_returns_bounds_p (gcall *call)
2185 {
2186 if (gimple_call_internal_p (call))
2187 return false;
2188
2189 if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
2190 || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
2191 return true;
2192
2193 if (gimple_call_with_bounds_p (call))
2194 return true;
2195
2196 tree fndecl = gimple_call_fndecl (call);
2197
2198 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
2199 return false;
2200
2201 if (fndecl && !chkp_instrumentable_p (fndecl))
2202 return false;
2203
2204 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2205 {
2206 if (chkp_instrument_normal_builtin (fndecl))
2207 return true;
2208
2209 if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
2210 return false;
2211
2212 struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
2213 return (clone && gimple_has_body_p (clone->decl));
2214 }
2215
2216 return true;
2217 }
2218
2219 /* Build bounds returned by CALL. */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      /* Alloca bounds: lower bound is the returned pointer,
	 size is the first call argument.  */
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      /* Result bounds equal bounds of the second (source) argument.  */
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* For an instrumented call the arg list also carries bounds
	     args; RETARG counts only non-bounds args, so skip bounds
	     args while translating it into an actual arg index.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      /* The retbnd call is placed right after the call whose
	 returned bounds it extracts.  */
      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  /* Associate the computed bounds with the call's lhs.  */
  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2325
2326 /* Return bounds used as returned by call
2327 which produced SSA name VAL. */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  /* Only SSA names can have a retbnd user.  */
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a call to the retbnd builtin;
     return the first one found, NULL if there is none.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2345
/* Check the next parameter for the given PARM is bounds
   and return its default SSA_NAME (create if required).  */
2348 static tree
2349 chkp_get_next_bounds_parm (tree parm)
2350 {
2351 tree bounds = TREE_CHAIN (parm);
2352 gcc_assert (POINTER_BOUNDS_P (bounds));
2353 bounds = ssa_default_def (cfun, bounds);
2354 if (!bounds)
2355 {
2356 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2357 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2358 }
2359 return bounds;
2360 }
2361
2362 /* Return bounds to be used for input argument PARM. */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Prefer bounds already registered for the SSA name, then for
     the underlying PARM_DECL.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Regular pointer param: its bounds arrive in the bounds
	     parameter chained right after DECL.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	/* Non-pointer parameters get zero bounds.  */
	bounds = chkp_get_zero_bounds ();
    }

  /* Cache the result for PARM so that later queries hit the
     registered-bounds lookup above.  */
  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2425
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments.  */
2428 tree
2429 chkp_build_bndldx_call (tree addr, tree ptr)
2430 {
2431 tree fn = build1 (ADDR_EXPR,
2432 build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
2433 chkp_bndldx_fndecl);
2434 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
2435 fn, 2, addr, ptr);
2436 CALL_WITH_BOUNDS_P (call) = true;
2437 return call;
2438 }
2439
2440 /* Insert code to load bounds for PTR located by ADDR.
2441 Code is inserted after position pointed by GSI.
2442 Loaded bounds are returned. */
static tree
chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;
  tree bounds;

  seq = NULL;

  /* Make both operands valid gimple call operands; any statements
     needed for that are accumulated in SEQ before the call.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  chkp_mark_stmt (stmt);
  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndldx for pointer ");
      print_generic_expr (dump_file, ptr, 0);
      fprintf (dump_file, ": ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return bounds;
}
2474
2475 /* Build and return CALL_EXPR for bndstx builtin with specified
2476 arguments. */
2477 tree
2478 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2479 {
2480 tree fn = build1 (ADDR_EXPR,
2481 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2482 chkp_bndstx_fndecl);
2483 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2484 fn, 3, ptr, bounds, addr);
2485 CALL_WITH_BOUNDS_P (call) = true;
2486 return call;
2487 }
2488
2489 /* Insert code to store BOUNDS for PTR stored by ADDR.
2490 New statements are inserted after position pointed
2491 by GSI. */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
		   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* Make both operands valid gimple call operands; any statements
     needed for that are accumulated in SEQ before the call.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Builtin's argument order is (ptr, bounds, addr).  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      /* Indent the generated statement by 2 in the dump.  */
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
2519
2520 /* Compute bounds for pointer NODE which was assigned in
2521 assignment statement ASSIGN. Return computed bounds. */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* BASE tracks which operand's bounds were propagated, for the
     abnormal-PHI copy below.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* Taking the address above requires the parm to be
	     addressable.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   NOTE: the unbraced else-if chains below rely on the
	   dangling-else rule — each inner 'else' binds to the
	   nearest 'if'.  Take care when editing.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same condition
	       that selects the value.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    gimple stmt;
	    /* Pick bounds of whichever operand wins the MIN/MAX.  */
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2742
2743 /* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
2744
2745 There are just few statement codes allowed: NOP (for default ssa names),
2746 ASSIGN, CALL, PHI, ASM.
2747
2748 Return computed bounds. */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: result depends on what kind of decl
	 the SSA name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a nonzero constant size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    /* Bounds cover exactly one referenced object.  */
	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* NOTE: the unbraced if/else below relies on the dangling-else
	 rule — the first 'else' binds to the inner 'if'.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      /* Bounds of a PHI result are computed with a matching PHI
	 placed in the same block; ITER is set to it so the caller
	 can fill in its arguments.  */
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* Nothing is known about values produced by asm statements;
	 fall back to zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2855
2856 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2857 tree
2858 chkp_build_make_bounds_call (tree lower_bound, tree size)
2859 {
2860 tree call = build1 (ADDR_EXPR,
2861 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2862 chkp_bndmk_fndecl);
2863 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2864 call, 2, lower_bound, size);
2865 }
2866
/* Create static bounds var of specified OBJ which is
   either VAR_DECL or string constant.  */
2869 static tree
2870 chkp_make_static_bounds (tree obj)
2871 {
2872 static int string_id = 1;
2873 static int var_id = 1;
2874 tree *slot;
2875 const char *var_name;
2876 char *bnd_var_name;
2877 tree bnd_var;
2878
2879 /* First check if we already have required var. */
2880 if (chkp_static_var_bounds)
2881 {
2882 /* For vars we use assembler name as a key in
2883 chkp_static_var_bounds map. It allows to
2884 avoid duplicating bound vars for decls
2885 sharing assembler name. */
2886 if (TREE_CODE (obj) == VAR_DECL)
2887 {
2888 tree name = DECL_ASSEMBLER_NAME (obj);
2889 slot = chkp_static_var_bounds->get (name);
2890 if (slot)
2891 return *slot;
2892 }
2893 else
2894 {
2895 slot = chkp_static_var_bounds->get (obj);
2896 if (slot)
2897 return *slot;
2898 }
2899 }
2900
2901 /* Build decl for bounds var. */
2902 if (TREE_CODE (obj) == VAR_DECL)
2903 {
2904 if (DECL_IGNORED_P (obj))
2905 {
2906 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2907 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2908 }
2909 else
2910 {
2911 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2912
2913 /* For hidden symbols we want to skip first '*' char. */
2914 if (*var_name == '*')
2915 var_name++;
2916
2917 bnd_var_name = (char *) xmalloc (strlen (var_name)
2918 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2919 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2920 strcat (bnd_var_name, var_name);
2921 }
2922
2923 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2924 get_identifier (bnd_var_name),
2925 pointer_bounds_type_node);
2926
2927 /* Address of the obj will be used as lower bound. */
2928 TREE_ADDRESSABLE (obj) = 1;
2929 }
2930 else
2931 {
2932 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2933 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2934
2935 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2936 get_identifier (bnd_var_name),
2937 pointer_bounds_type_node);
2938 }
2939
2940 TREE_PUBLIC (bnd_var) = 0;
2941 TREE_USED (bnd_var) = 1;
2942 TREE_READONLY (bnd_var) = 0;
2943 TREE_STATIC (bnd_var) = 1;
2944 TREE_ADDRESSABLE (bnd_var) = 0;
2945 DECL_ARTIFICIAL (bnd_var) = 1;
2946 DECL_COMMON (bnd_var) = 1;
2947 DECL_COMDAT (bnd_var) = 1;
2948 DECL_READ_P (bnd_var) = 1;
2949 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
2950 /* Force output similar to constant bounds.
2951 See chkp_make_static_const_bounds. */
2952 varpool_node::get_create (bnd_var)->force_output = 1;
2953 /* Mark symbol as requiring bounds initialization. */
2954 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
2955 varpool_node::finalize_decl (bnd_var);
2956
2957 /* Add created var to the map to use it for other references
2958 to obj. */
2959 if (!chkp_static_var_bounds)
2960 chkp_static_var_bounds = new hash_map<tree, tree>;
2961
2962 if (TREE_CODE (obj) == VAR_DECL)
2963 {
2964 tree name = DECL_ASSEMBLER_NAME (obj);
2965 chkp_static_var_bounds->put (name, bnd_var);
2966 }
2967 else
2968 chkp_static_var_bounds->put (obj, bnd_var);
2969
2970 return bnd_var;
2971 }
2972
2973 /* When var has incomplete type we cannot get size to
2974 compute its bounds. In such cases we use checker
2975 builtin call which determines object size at runtime. */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* The object's size is obtained via a call to the chkp sizeof
     builtin, resolved at runtime/link time.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      /* size = size_reloc != 0 ? size_reloc : max_size  */
      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* The whole computation is placed at function entry.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3033
3034 /* Return 1 if TYPE has fields with zero size or fields
3035 marked with chkp_variable_size attribute. */
3036 bool
3037 chkp_variable_size_type (tree type)
3038 {
3039 bool res = false;
3040 tree field;
3041
3042 if (RECORD_OR_UNION_TYPE_P (type))
3043 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3044 {
3045 if (TREE_CODE (field) == FIELD_DECL)
3046 res = res
3047 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3048 || chkp_variable_size_type (TREE_TYPE (field));
3049 }
3050 else
3051 res = !TYPE_SIZE (type)
3052 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3053 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3054
3055 return res;
3056 }
3057
3058 /* Compute and return bounds for address of DECL which is
3059 one of VAR_DECL, PARM_DECL, RESULT_DECL. */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Reuse bounds previously registered for this address.  */
  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  /* For static/global non-TLS vars, prefer loading from a static
     bounds var initialized at program start.  */
  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size unknown at compile time: compute it at runtime
	 via the sizeof relocation.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Known size: bounds are [&decl, &decl + size - 1].  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
3123
3124 /* Compute and return bounds for constant string. */
3125 static tree
3126 chkp_get_bounds_for_string_cst (tree cst)
3127 {
3128 tree bounds;
3129 tree lb;
3130 tree size;
3131
3132 gcc_assert (TREE_CODE (cst) == STRING_CST);
3133
3134 bounds = chkp_get_registered_bounds (cst);
3135
3136 if (bounds)
3137 return bounds;
3138
3139 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
3140 || flag_chkp_use_static_const_bounds > 0)
3141 {
3142 tree bnd_var = chkp_make_static_bounds (cst);
3143 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
3144 gimple stmt;
3145
3146 bounds = chkp_get_tmp_reg (NULL);
3147 stmt = gimple_build_assign (bounds, bnd_var);
3148 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3149 }
3150 else
3151 {
3152 lb = chkp_build_addr_expr (cst);
3153 size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
3154 bounds = chkp_make_bounds (lb, size, NULL, false);
3155 }
3156
3157 bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);
3158
3159 return bounds;
3160 }
3161
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Intersection with zero (infinite) bounds is a no-op; just
     return the other operand.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  /* ITER is repointed to the local GSI so the dump code
	     below can dereference it uniformly; insertion happens
	     at the end of the entry block.  */
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3212
3213 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3214 and 0 othersize. */
3215 static bool
3216 chkp_may_narrow_to_field (tree field)
3217 {
3218 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3219 && tree_to_uhwi (DECL_SIZE (field)) != 0
3220 && (!DECL_FIELD_OFFSET (field)
3221 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3222 && (!DECL_FIELD_BIT_OFFSET (field)
3223 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3224 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3225 && !chkp_variable_size_type (TREE_TYPE (field));
3226 }
3227
3228 /* Return 1 if bounds for FIELD should be narrowed to
3229 field's own size. */
3230 static bool
3231 chkp_narrow_bounds_for_field (tree field)
3232 {
3233 HOST_WIDE_INT offs;
3234 HOST_WIDE_INT bit_offs;
3235
3236 if (!chkp_may_narrow_to_field (field))
3237 return false;
3238
3239 /* Accesse to compiler generated fields should not cause
3240 bounds narrowing. */
3241 if (DECL_ARTIFICIAL (field))
3242 return false;
3243
3244 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3245 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3246
3247 return (flag_chkp_narrow_bounds
3248 && (flag_chkp_first_field_has_own_bounds
3249 || offs
3250 || bit_offs));
3251 }
3252
3253 /* Perform narrowing for BOUNDS using bounds computed for field
3254 access COMPONENT. ITER meaning is the same as for
3255 chkp_intersect_bounds. */
3256 static tree
3257 chkp_narrow_bounds_to_field (tree bounds, tree component,
3258 gimple_stmt_iterator *iter)
3259 {
3260 tree field = TREE_OPERAND (component, 1);
3261 tree size = DECL_SIZE_UNIT (field);
3262 tree field_ptr = chkp_build_addr_expr (component);
3263 tree field_bounds;
3264
3265 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3266
3267 return chkp_intersect_bounds (field_bounds, bounds, iter);
3268 }
3269
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* Access starts from a declaration or constant; its address
	 is taken directly and bounds may not be needed at all.  */
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1. Leftmost array_ref is chosen if any.
     2. Rightmost suitable component_ref is chosen if innermost
	bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Rule 1: narrow to the component preceding the
		 leftmost array reference.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to each array-typed field as we pass it and
		 restart the search for a component to narrow to.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3411
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  /* Reuse bounds previously registered for this address.  */
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Narrow to the innermost accessed component
	   (INNERMOST_BOUNDS is true).  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Function and label addresses get zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Address of *p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3479
3480 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3481 to compute bounds if required. Computed bounds should be available at
3482 position pointed by ITER.
3483
3484 If PTR_SRC is NULL_TREE then pointer definition is identified.
3485
3486 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3487 PTR. If PTR is a any memory reference then ITER points to a statement
3488 after which bndldx will be inserterd. In both cases ITER will be updated
3489 to point to the inserted bndldx statement. */
3490
3491 static tree
3492 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3493 {
3494 tree addr = NULL_TREE;
3495 tree bounds = NULL_TREE;
3496
3497 if (!ptr_src)
3498 ptr_src = ptr;
3499
3500 bounds = chkp_get_registered_bounds (ptr_src);
3501
3502 if (bounds)
3503 return bounds;
3504
3505 switch (TREE_CODE (ptr_src))
3506 {
3507 case MEM_REF:
3508 case VAR_DECL:
3509 if (BOUNDED_P (ptr_src))
3510 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3511 bounds = chkp_get_zero_bounds ();
3512 else
3513 {
3514 addr = chkp_build_addr_expr (ptr_src);
3515 bounds = chkp_build_bndldx (addr, ptr, iter);
3516 }
3517 else
3518 bounds = chkp_get_nonpointer_load_bounds ();
3519 break;
3520
3521 case ARRAY_REF:
3522 case COMPONENT_REF:
3523 addr = get_base_address (ptr_src);
3524 if (DECL_P (addr)
3525 || TREE_CODE (addr) == MEM_REF
3526 || TREE_CODE (addr) == TARGET_MEM_REF)
3527 {
3528 if (BOUNDED_P (ptr_src))
3529 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3530 bounds = chkp_get_zero_bounds ();
3531 else
3532 {
3533 addr = chkp_build_addr_expr (ptr_src);
3534 bounds = chkp_build_bndldx (addr, ptr, iter);
3535 }
3536 else
3537 bounds = chkp_get_nonpointer_load_bounds ();
3538 }
3539 else
3540 {
3541 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3542 bounds = chkp_find_bounds (addr, iter);
3543 }
3544 break;
3545
3546 case PARM_DECL:
3547 gcc_unreachable ();
3548 bounds = chkp_get_bound_for_parm (ptr_src);
3549 break;
3550
3551 case TARGET_MEM_REF:
3552 addr = chkp_build_addr_expr (ptr_src);
3553 bounds = chkp_build_bndldx (addr, ptr, iter);
3554 break;
3555
3556 case SSA_NAME:
3557 bounds = chkp_get_registered_bounds (ptr_src);
3558 if (!bounds)
3559 {
3560 gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3561 gphi_iterator phi_iter;
3562
3563 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3564
3565 gcc_assert (bounds);
3566
3567 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3568 {
3569 unsigned i;
3570
3571 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3572 {
3573 tree arg = gimple_phi_arg_def (def_phi, i);
3574 tree arg_bnd;
3575 gphi *phi_bnd;
3576
3577 arg_bnd = chkp_find_bounds (arg, NULL);
3578
3579 /* chkp_get_bounds_by_definition created new phi
3580 statement and phi_iter points to it.
3581
3582 Previous call to chkp_find_bounds could create
3583 new basic block and therefore change phi statement
3584 phi_iter points to. */
3585 phi_bnd = phi_iter.phi ();
3586
3587 add_phi_arg (phi_bnd, arg_bnd,
3588 gimple_phi_arg_edge (def_phi, i),
3589 UNKNOWN_LOCATION);
3590 }
3591
3592 /* If all bound phi nodes have their arg computed
3593 then we may finish its computation. See
3594 chkp_finish_incomplete_bounds for more details. */
3595 if (chkp_may_finish_incomplete_bounds ())
3596 chkp_finish_incomplete_bounds ();
3597 }
3598
3599 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3600 || chkp_incomplete_bounds (bounds));
3601 }
3602 break;
3603
3604 case ADDR_EXPR:
3605 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3606 break;
3607
3608 case INTEGER_CST:
3609 if (integer_zerop (ptr_src))
3610 bounds = chkp_get_none_bounds ();
3611 else
3612 bounds = chkp_get_invalid_op_bounds ();
3613 break;
3614
3615 default:
3616 if (dump_file && (dump_flags & TDF_DETAILS))
3617 {
3618 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3619 get_tree_code_name (TREE_CODE (ptr_src)));
3620 print_node (dump_file, "", ptr_src, 0);
3621 }
3622 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3623 get_tree_code_name (TREE_CODE (ptr_src)));
3624 }
3625
3626 if (!bounds)
3627 {
3628 if (dump_file && (dump_flags & TDF_DETAILS))
3629 {
3630 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3631 print_node (dump_file, "", ptr_src, 0);
3632 }
3633 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3634 }
3635
3636 return bounds;
3637 }
3638
3639 /* Normal case for bounds search without forced narrowing. */
3640 static tree
3641 chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
3642 {
3643 return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
3644 }
3645
3646 /* Search bounds for pointer PTR loaded from PTR_SRC
3647 by statement *ITER points to. */
3648 static tree
3649 chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3650 {
3651 return chkp_find_bounds_1 (ptr, ptr_src, iter);
3652 }
3653
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS is a pointer itself; handle the copy directly.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into each initializer element whose field
	     type contains pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Whole-object copy: recurse field by field into fields
	   containing pointers.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      /* A RANGE_EXPR index initializes the whole
		 [lo_index, hi_index] slice with VAL.
		 NOTE(review): the (unsigned) casts truncate the
		 64-bit index values to 32 bits - looks suspicious
		 for huge arrays; confirm intended index range.  */
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit INTEGER_CST index repositions CUR;
		     otherwise elements are assumed consecutive.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3752
3753 /* Add code to copy bounds for assignment of RHS to LHS.
3754 ARG is an iterator pointing ne code position. */
3755 static void
3756 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3757 {
3758 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3759 tree bounds = chkp_find_bounds (rhs, iter);
3760 tree addr = chkp_build_addr_expr(lhs);
3761
3762 chkp_build_bndstx (addr, rhs, bounds, iter);
3763 }
3764
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* When the statement budget is exhausted, flush the
	   accumulated statements into a constructor and start
	   a fresh list.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush any remaining statements.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* A static bounds var is initialized with the address of
	   the variable it holds bounds for.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Release pass-global maps; instrumentation of the file is done.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3832
3833 /* An instrumentation function which is called for each statement
3834 having memory access we want to instrument. It inserts check
3835 code and bounds copy code.
3836
3837 ITER points to statement to instrument.
3838
3839 NODE holds memory access in statement to check.
3840
3841 LOC holds the location information for statement.
3842
3843 DIRFLAGS determines whether access is read or write.
3844
3845 ACCESS_OFFS should be added to address used in NODE
3846 before check.
3847
3848 ACCESS_SIZE holds size of checked access.
3849
3850 SAFE indicates if NODE access is safe and should not be
3851 checked. */
3852 static void
3853 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3854 location_t loc, tree dirflag,
3855 tree access_offs, tree access_size,
3856 bool safe)
3857 {
3858 tree node_type = TREE_TYPE (node);
3859 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3860 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3861 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3862 tree ptr = NULL_TREE; /* a pointer used for dereference */
3863 tree bounds = NULL_TREE;
3864
3865 /* We do not need instrumentation for clobbers. */
3866 if (dirflag == integer_one_node
3867 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3868 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3869 return;
3870
3871 switch (TREE_CODE (node))
3872 {
3873 case ARRAY_REF:
3874 case COMPONENT_REF:
3875 {
3876 bool bitfield;
3877 tree elt;
3878
3879 if (safe)
3880 {
3881 /* We are not going to generate any checks, so do not
3882 generate bounds as well. */
3883 addr_first = chkp_build_addr_expr (node);
3884 break;
3885 }
3886
3887 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3888 &bitfield, &bounds, iter, false);
3889
3890 /* Break if there is no dereference and operation is safe. */
3891
3892 if (bitfield)
3893 {
3894 tree field = TREE_OPERAND (node, 1);
3895
3896 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3897 size = DECL_SIZE_UNIT (field);
3898
3899 if (elt)
3900 elt = chkp_build_addr_expr (elt);
3901 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3902 addr_first = fold_build_pointer_plus_loc (loc,
3903 addr_first,
3904 byte_position (field));
3905 }
3906 else
3907 addr_first = chkp_build_addr_expr (node);
3908 }
3909 break;
3910
3911 case INDIRECT_REF:
3912 ptr = TREE_OPERAND (node, 0);
3913 addr_first = ptr;
3914 break;
3915
3916 case MEM_REF:
3917 ptr = TREE_OPERAND (node, 0);
3918 addr_first = chkp_build_addr_expr (node);
3919 break;
3920
3921 case TARGET_MEM_REF:
3922 ptr = TMR_BASE (node);
3923 addr_first = chkp_build_addr_expr (node);
3924 break;
3925
3926 case ARRAY_RANGE_REF:
3927 printf("ARRAY_RANGE_REF\n");
3928 debug_gimple_stmt(gsi_stmt(*iter));
3929 debug_tree(node);
3930 gcc_unreachable ();
3931 break;
3932
3933 case BIT_FIELD_REF:
3934 {
3935 tree offs, rem, bpu;
3936
3937 gcc_assert (!access_offs);
3938 gcc_assert (!access_size);
3939
3940 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3941 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3942 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3943 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
3944
3945 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
3946 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
3947 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
3948 size = fold_convert (size_type_node, size);
3949
3950 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
3951 dirflag, offs, size, safe);
3952 return;
3953 }
3954 break;
3955
3956 case VAR_DECL:
3957 case RESULT_DECL:
3958 case PARM_DECL:
3959 if (dirflag != integer_one_node
3960 || DECL_REGISTER (node))
3961 return;
3962
3963 safe = true;
3964 addr_first = chkp_build_addr_expr (node);
3965 break;
3966
3967 default:
3968 return;
3969 }
3970
3971 /* If addr_last was not computed then use (addr_first + size - 1)
3972 expression to compute it. */
3973 if (!addr_last)
3974 {
3975 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
3976 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
3977 }
3978
3979 /* Shift both first_addr and last_addr by access_offs if specified. */
3980 if (access_offs)
3981 {
3982 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
3983 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
3984 }
3985
3986 /* Generate bndcl/bndcu checks if memory access is not safe. */
3987 if (!safe)
3988 {
3989 gimple_stmt_iterator stmt_iter = *iter;
3990
3991 if (!bounds)
3992 bounds = chkp_find_bounds (ptr, iter);
3993
3994 chkp_check_mem_access (addr_first, addr_last, bounds,
3995 stmt_iter, loc, dirflag);
3996 }
3997
3998 /* We need to store bounds in case pointer is stored. */
3999 if (dirflag == integer_one_node
4000 && chkp_type_has_pointer (node_type)
4001 && flag_chkp_store_bounds)
4002 {
4003 gimple stmt = gsi_stmt (*iter);
4004 tree rhs1 = gimple_assign_rhs1 (stmt);
4005 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4006
4007 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4008 chkp_walk_pointer_assignments (node, rhs1, iter,
4009 chkp_copy_bounds_for_elem);
4010 else
4011 {
4012 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4013 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4014 }
4015 }
4016 }
4017
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only bounds load/store builtins (and the ret-bounds
	     builtin) are expected among the statements created
	     by the walk above.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      /* Step backwards until we reach ASSIGN again.  */
      gsi_prev (&iter);
    }
}
4056
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements trailing a block-ending statement are the
	   inconsistency to repair.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4114
4115 /* Walker callback for chkp_replace_function_pointers. Replaces
4116 function pointer in the specified operand with pointer to the
4117 instrumented function version. */
4118 static tree
4119 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4120 void *data ATTRIBUTE_UNUSED)
4121 {
4122 if (TREE_CODE (*op) == FUNCTION_DECL
4123 && chkp_instrumentable_p (*op)
4124 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4125 /* For builtins we replace pointers only for selected
4126 function and functions having definitions. */
4127 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4128 && (chkp_instrument_normal_builtin (*op)
4129 || gimple_has_body_p (*op)))))
4130 {
4131 struct cgraph_node *node = cgraph_node::get_create (*op);
4132 struct cgraph_node *clone = NULL;
4133
4134 if (!node->instrumentation_clone)
4135 clone = chkp_maybe_create_clone (*op);
4136
4137 if (clone)
4138 *op = clone->decl;
4139 *walk_subtrees = 0;
4140 }
4141
4142 return NULL;
4143 }
4144
4145 /* This function searches for function pointers in statement
4146 pointed by GSI and replaces them with pointers to instrumented
4147 function versions. */
4148 static void
4149 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4150 {
4151 gimple stmt = gsi_stmt (*gsi);
4152 /* For calls we want to walk call args only. */
4153 if (gimple_code (stmt) == GIMPLE_CALL)
4154 {
4155 unsigned i;
4156 for (i = 0; i < gimple_call_num_args (stmt); i++)
4157 walk_tree (gimple_call_arg_ptr (stmt, i),
4158 chkp_replace_function_pointer, NULL, NULL);
4159 }
4160 else
4161 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4162 }
4163
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* In checker constructors all accesses are known to be safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* LHS is a store (integer_one_node), RHS operands
		 are loads (integer_zero_node).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      /* Pointer param: store its entry bounds at its address.  */
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* Aggregate param containing pointers: store bounds
		 for each pointer slot found in its type.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4299
4300 /* Find init/null/copy_ptr_bounds calls and replace them
4301 with assignments. It should allow better code
4302 optimization. */
4303
4304 static void
4305 chkp_remove_useless_builtins ()
4306 {
4307 basic_block bb;
4308 gimple_stmt_iterator gsi;
4309
4310 FOR_EACH_BB_FN (bb, cfun)
4311 {
4312 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4313 {
4314 gimple stmt = gsi_stmt (gsi);
4315 tree fndecl;
4316 enum built_in_function fcode;
4317
4318 /* Find builtins returning first arg and replace
4319 them with assignments. */
4320 if (gimple_code (stmt) == GIMPLE_CALL
4321 && (fndecl = gimple_call_fndecl (stmt))
4322 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4323 && (fcode = DECL_FUNCTION_CODE (fndecl))
4324 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4325 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4326 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4327 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4328 {
4329 tree res = gimple_call_arg (stmt, 0);
4330 update_call_from_tree (&gsi, res);
4331 stmt = gsi_stmt (gsi);
4332 update_stmt (stmt);
4333 }
4334 }
4335 }
4336 }
4337
/* Initialize pass: reset per-function state of the Pointer Bounds
   Checker before instrumenting the current function.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks on every statement of the function;
     they are set again as statements get processed.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* Allocate fresh per-function sets/maps.  The ones released in
     chkp_fini (chkp_invalid_bounds, chkp_completed_bounds_set,
     chkp_reg_addr_bounds, chkp_incomplete_bounds_map) can simply be
     allocated; the others outlive chkp_fini and therefore must be
     deleted here first to avoid leaking the previous function's map.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  /* GC-allocated bitmap; no matching explicit free in chkp_fini.  */
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  /* Reset lazily-created helpers; they are rebuilt on demand.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  /* Unsigned integer type with the width of a pointer.  */
  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  /* Dominance info is used during instrumentation and freed
     in chkp_fini.  */
  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4383
/* Finalize instrumentation pass: release per-function resources
   acquired by chkp_init.  Note that chkp_reg_bounds, chkp_bound_vars
   and chkp_bounds_map are intentionally NOT freed here; chkp_init
   deletes them before reallocating for the next function.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  /* Matches the calculate_dominance_info calls in chkp_init.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Drop cached per-function helpers so stale values cannot leak
     into the next instrumented function.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4404
/* Main instrumentation pass function.  Runs the phases of the
   Pointer Bounds Checker over the current function in order and
   always returns 0 (no extra TODO flags).  */
static unsigned int
chkp_execute (void)
{
  /* Reset per-function pass state (see chkp_init).  */
  chkp_init ();

  /* Insert bounds checks/stores for the function body.  */
  chkp_instrument_function ();

  /* Turn init/null/copy_ptr_bounds builtin calls into plain
     assignments to enable further optimization.  */
  chkp_remove_useless_builtins ();

  /* Mark the function decl as instrumented so it is not
     processed again.  */
  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  /* Release pass resources (see chkp_fini).  */
  chkp_fini ();

  return 0;
}
4423
4424 /* Instrumentation pass gate. */
4425 static bool
4426 chkp_gate (void)
4427 {
4428 cgraph_node *node = cgraph_node::get (cfun->decl);
4429 return ((node != NULL
4430 && node->instrumentation_clone)
4431 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4432 }
4433
namespace {

/* Pass descriptor for the bounds-checker instrumentation pass.
   Requires SSA form and a CFG; verifies the IL and updates SSA
   after the pass finishes.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass-manager wrapper delegating to chkp_gate/chkp_execute.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4476
/* Factory used by the pass manager to create the chkp pass
   instance.  Caller takes ownership of the returned pass.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4482
4483 #include "gt-tree-chkp.h"