PR c/71171: Fix uninitialized source_range in c_parser_postfix_expression
[gcc.git] / gcc / tree-chkp.c
1 /* Pointer Bounds Checker instrumentation pass.
2 Copyright (C) 2014-2016 Free Software Foundation, Inc.
3 Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "diagnostic.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "varasm.h"
37 #include "tree-iterator.h"
38 #include "tree-cfg.h"
39 #include "langhooks.h"
40 #include "tree-ssa-address.h"
41 #include "tree-ssa-loop-niter.h"
42 #include "gimple-pretty-print.h"
43 #include "gimple-iterator.h"
44 #include "gimplify.h"
45 #include "gimplify-me.h"
46 #include "print-tree.h"
47 #include "calls.h"
48 #include "expr.h"
49 #include "tree-ssa-propagate.h"
50 #include "tree-chkp.h"
51 #include "gimple-walk.h"
52 #include "tree-dfa.h"
53 #include "ipa-chkp.h"
54 #include "params.h"
55
56 /* Pointer Bounds Checker instruments code with memory checks to find
57 out-of-bounds memory accesses. Checks are performed by computing
58 bounds for each pointer and then comparing address of accessed
59 memory before pointer dereferencing.
60
61 1. Function clones.
62
63 See ipa-chkp.c.
64
65 2. Instrumentation.
66
67 There are few things to instrument:
68
69 a) Memory accesses - add checker calls to check address of accessed memory
70 against bounds of dereferenced pointer. Obviously safe memory
71 accesses like static variable access does not have to be instrumented
72 with checks.
73
74 Example:
75
76 val_2 = *p_1;
77
78 with 4 bytes access is transformed into:
79
80 __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
81 D.1_4 = p_1 + 3;
82 __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
83 val_2 = *p_1;
84
85 where __bound_tmp.1_3 are bounds computed for pointer p_1,
86 __builtin___chkp_bndcl is a lower bound check and
87 __builtin___chkp_bndcu is an upper bound check.
88
89 b) Pointer stores.
90
91 When pointer is stored in memory we need to store its bounds. To
92 achieve compatibility of instrumented code with regular codes
93 we have to keep data layout and store bounds in special bound tables
94 via special checker call. Implementation of bounds table may vary for
95 different platforms. It has to associate pointer value and its
96 location (it is required because we may have two equal pointers
97 with different bounds stored in different places) with bounds.
98 Another checker builtin allows to get bounds for specified pointer
99 loaded from specified location.
100
101 Example:
102
103 buf1[i_1] = &buf2;
104
105 is transformed into:
106
107 buf1[i_1] = &buf2;
108 D.1_2 = &buf1[i_1];
109 __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
110
111 where __bound_tmp.1_2 are bounds of &buf2.
112
113 c) Static initialization.
114
115 The special case of pointer store is static pointer initialization.
116 Bounds initialization is performed in a few steps:
117 - register all static initializations in front-end using
118 chkp_register_var_initializer
119 - when file compilation finishes we create functions with special
120 attribute 'chkp ctor' and put explicit initialization code
121 (assignments) for all statically initialized pointers.
122 - when checker constructor is compiled checker pass adds required
123 bounds initialization for all statically initialized pointers
124 - since we do not actually need excess pointers initialization
125 in checker constructor we remove such assignments from them
126
127 d) Calls.
128
129 For each call in the code we add additional arguments to pass
130 bounds for pointer arguments. We determine type of call arguments
131 using arguments list from function declaration; if function
132 declaration is not available we use function type; otherwise
133 (e.g. for unnamed arguments) we use type of passed value. Function
134 declaration/type is replaced with the instrumented one.
135
136 Example:
137
138 val_1 = foo (&buf1, &buf2, &buf1, 0);
139
140 is translated into:
141
142 val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
143 &buf1, __bound_tmp.1_2, 0);
144
145 e) Returns.
146
147 If function returns a pointer value we have to return bounds also.
148 A new operand was added for return statement to hold returned bounds.
149
150 Example:
151
152 return &_buf1;
153
154 is transformed into
155
156 return &_buf1, __bound_tmp.1_1;
157
158 3. Bounds computation.
159
160 Compiler is fully responsible for computing bounds to be used for each
161 memory access. The first step for bounds computation is to find the
162    origin of the pointer dereferenced for the memory access. Based on the
163 origin we define a way to compute its bounds. There are just few
164 possible cases:
165
166 a) Pointer is returned by call.
167
168 In this case we use corresponding checker builtin method to obtain returned
169 bounds.
170
171 Example:
172
173 buf_1 = malloc (size_2);
174 foo (buf_1);
175
176 is translated into:
177
178 buf_1 = malloc (size_2);
179 __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
180 foo (buf_1, __bound_tmp.1_3);
181
182 b) Pointer is an address of an object.
183
184 In this case compiler tries to compute objects size and create corresponding
185 bounds. If object has incomplete type then special checker builtin is used to
186 obtain its size at runtime.
187
188 Example:
189
190 foo ()
191 {
192 <unnamed type> __bound_tmp.3;
193 static int buf[100];
194
195 <bb 3>:
196 __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
197
198 <bb 2>:
199 return &buf, __bound_tmp.3_2;
200 }
201
202 Example:
203
204 Address of an object 'extern int buf[]' with incomplete type is
205 returned.
206
207 foo ()
208 {
209 <unnamed type> __bound_tmp.4;
210 long unsigned int __size_tmp.3;
211
212 <bb 3>:
213 __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
214 __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
215
216 <bb 2>:
217 return &buf, __bound_tmp.4_3;
218 }
219
220 c) Pointer is the result of object narrowing.
221
222 It happens when we use pointer to an object to compute pointer to a part
223 of an object. E.g. we take pointer to a field of a structure. In this
224 case we perform bounds intersection using bounds of original object and
225 bounds of object's part (which are computed basing on its type).
226
227 There may be some debatable questions about when narrowing should occur
228 and when it should not. To avoid false bound violations in correct
229 programs we do not perform narrowing when address of an array element is
230 obtained (it has address of the whole array) and when address of the first
231 structure field is obtained (because it is guaranteed to be equal to
232 address of the whole structure and it is legal to cast it back to structure).
233
234 Default narrowing behavior may be changed using compiler flags.
235
236 Example:
237
238 In this example address of the second structure field is returned.
239
240 foo (struct A * p, __bounds_type __bounds_of_p)
241 {
242 <unnamed type> __bound_tmp.3;
243 int * _2;
244 int * _5;
245
246 <bb 2>:
247 _5 = &p_1(D)->second_field;
248 __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
249 __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
250 __bounds_of_p_3(D));
251 _2 = &p_1(D)->second_field;
252 return _2, __bound_tmp.3_8;
253 }
254
255 Example:
256
257 In this example address of the first field of array element is returned.
258
259 foo (struct A * p, __bounds_type __bounds_of_p, int i)
260 {
261 long unsigned int _3;
262 long unsigned int _4;
263 struct A * _6;
264 int * _7;
265
266 <bb 2>:
267 _3 = (long unsigned int) i_1(D);
268 _4 = _3 * 8;
269 _6 = p_5(D) + _4;
270 _7 = &_6->first_field;
271 return _7, __bounds_of_p_2(D);
272 }
273
274
275 d) Pointer is the result of pointer arithmetic or type cast.
276
277 In this case bounds of the base pointer are used. In case of binary
278 operation producing a pointer we are analyzing data flow further
279 looking for operand's bounds. One operand is considered as a base
280 if it has some valid bounds. If we fall into a case when none of
281 operands (or both of them) has valid bounds, a default bounds value
282 is used.
283
284 Trying to find out bounds for binary operations we may fall into
285 cyclic dependencies for pointers. To avoid infinite recursion all
286 walked phi nodes instantly obtain corresponding bounds but created
287 bounds are marked as incomplete. It helps us to stop DF walk during
288 bounds search.
289
290 When we reach pointer source, some args of incomplete bounds phi obtain
291 valid bounds and those values are propagated further through phi nodes.
292 If no valid bounds were found for phi node then we mark its result as
293 invalid bounds. Process stops when all incomplete bounds become either
294 valid or invalid and we are able to choose a pointer base.
295
296 e) Pointer is loaded from the memory.
297
298 In this case we just need to load bounds from the bounds table.
299
300 Example:
301
302 foo ()
303 {
304 <unnamed type> __bound_tmp.3;
305 static int * buf;
306 int * _2;
307
308 <bb 2>:
309 _2 = buf;
310 __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
311 return _2, __bound_tmp.3_4;
312 }
313
314 */
315
316 typedef void (*assign_handler)(tree, tree, void *);
317
318 static tree chkp_get_zero_bounds ();
319 static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
320 static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
321 gimple_stmt_iterator *iter);
322 static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
323 tree *elt, bool *safe,
324 bool *bitfield,
325 tree *bounds,
326 gimple_stmt_iterator *iter,
327 bool innermost_bounds);
328
329 #define chkp_bndldx_fndecl \
330 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
331 #define chkp_bndstx_fndecl \
332 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
333 #define chkp_checkl_fndecl \
334 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
335 #define chkp_checku_fndecl \
336 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
337 #define chkp_bndmk_fndecl \
338 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
339 #define chkp_ret_bnd_fndecl \
340 (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
341 #define chkp_intersect_fndecl \
342 (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
343 #define chkp_narrow_bounds_fndecl \
344 (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
345 #define chkp_sizeof_fndecl \
346 (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
347 #define chkp_extract_lower_fndecl \
348 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
349 #define chkp_extract_upper_fndecl \
350 (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
351
352 static GTY (()) tree chkp_uintptr_type;
353
354 static GTY (()) tree chkp_zero_bounds_var;
355 static GTY (()) tree chkp_none_bounds_var;
356
357 static GTY (()) basic_block entry_block;
358 static GTY (()) tree zero_bounds;
359 static GTY (()) tree none_bounds;
360 static GTY (()) tree incomplete_bounds;
361 static GTY (()) tree tmp_var;
362 static GTY (()) tree size_tmp_var;
363 static GTY (()) bitmap chkp_abnormal_copies;
364
365 struct hash_set<tree> *chkp_invalid_bounds;
366 struct hash_set<tree> *chkp_completed_bounds_set;
367 struct hash_map<tree, tree> *chkp_reg_bounds;
368 struct hash_map<tree, tree> *chkp_bound_vars;
369 struct hash_map<tree, tree> *chkp_reg_addr_bounds;
370 struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
371 struct hash_map<tree, tree> *chkp_bounds_map;
372 struct hash_map<tree, tree> *chkp_static_var_bounds;
373
374 static bool in_chkp_pass;
375
376 #define CHKP_BOUND_TMP_NAME "__bound_tmp"
377 #define CHKP_SIZE_TMP_NAME "__size_tmp"
378 #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
379 #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
380 #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
381 #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
382 #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
383
384 /* Static checker constructors may become very large and their
385 compilation with optimization may take too much time.
386 Therefore we put a limit to number of statements in one
387 constructor. Tests with 100 000 statically initialized
388 pointers showed following compilation times on Sandy Bridge
389 server (used -O2):
390 limit 100 => ~18 sec.
391 limit 300 => ~22 sec.
392 limit 1000 => ~30 sec.
393 limit 3000 => ~49 sec.
394 limit 5000 => ~55 sec.
395 limit 10000 => ~76 sec.
396 limit 100000 => ~532 sec. */
397 #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
398
399 struct chkp_ctor_stmt_list
400 {
401 tree stmts;
402 int avail;
403 };
404
405 /* Return 1 if function FNDECL is instrumented by Pointer
406 Bounds Checker. */
407 bool
408 chkp_function_instrumented_p (tree fndecl)
409 {
410 return fndecl
411 && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
412 }
413
414 /* Mark function FNDECL as instrumented. */
415 void
416 chkp_function_mark_instrumented (tree fndecl)
417 {
418 if (chkp_function_instrumented_p (fndecl))
419 return;
420
421 DECL_ATTRIBUTES (fndecl)
422 = tree_cons (get_identifier ("chkp instrumented"), NULL,
423 DECL_ATTRIBUTES (fndecl));
424 }
425
426 /* Return true when STMT is builtin call to instrumentation function
427 corresponding to CODE. */
428
429 bool
430 chkp_gimple_call_builtin_p (gimple *call,
431 enum built_in_function code)
432 {
433 tree fndecl;
434 if (is_gimple_call (call)
435 && (fndecl = targetm.builtin_chkp_function (code))
436 && gimple_call_fndecl (call) == fndecl)
437 return true;
438 return false;
439 }
440
441 /* Emit code to build zero bounds and return RTL holding
442 the result. */
443 rtx
444 chkp_expand_zero_bounds ()
445 {
446 tree zero_bnd;
447
448 if (flag_chkp_use_static_const_bounds)
449 zero_bnd = chkp_get_zero_bounds_var ();
450 else
451 zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
452 integer_zero_node);
453 return expand_normal (zero_bnd);
454 }
455
/* Emit code to store zero bounds for PTR located at MEM.  */
void
chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
{
  tree zero_bnd, bnd, addr, bndstx;

  /* Zero bounds come either from the shared static const variable or
     from an explicit bndmk (0, 0) builtin call.  */
  if (flag_chkp_use_static_const_bounds)
    zero_bnd = chkp_get_zero_bounds_var ();
  else
    zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
					    integer_zero_node);
  /* Temporary holding the bounds value to be stored.  */
  bnd = make_tree (pointer_bounds_type_node,
		   assign_temp (pointer_bounds_type_node, 0, 1));
  /* The bounds table is keyed by the address of the pointer's
     location, so take MEM's address.  */
  addr = build1 (ADDR_EXPR,
		 build_pointer_type (TREE_TYPE (mem)), mem);
  bndstx = chkp_build_bndstx_call (addr, ptr, bnd);

  /* Materialize the zero bounds in BND first, then emit the store —
     the order matters: bndstx reads BND.  */
  expand_assignment (bnd, zero_bnd, false);
  expand_normal (bndstx);
}
476
477 /* Build retbnd call for returned value RETVAL.
478
479 If BNDVAL is not NULL then result is stored
480 in it. Otherwise a temporary is created to
481 hold returned value.
482
483 GSI points to a position for a retbnd call
484 and is set to created stmt.
485
486 Cgraph edge is created for a new call if
487 UPDATE_EDGE is 1.
488
489 Obtained bounds are returned. */
490 tree
491 chkp_insert_retbnd_call (tree bndval, tree retval,
492 gimple_stmt_iterator *gsi)
493 {
494 gimple *call;
495
496 if (!bndval)
497 bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
498
499 call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
500 gimple_call_set_lhs (call, bndval);
501 gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
502
503 return bndval;
504 }
505
/* Build a GIMPLE_CALL identical to CALL but skipping bounds
   arguments.  */

gcall *
chkp_copy_call_skip_bounds (gcall *call)
{
  bitmap bounds;
  unsigned i;

  bitmap_obstack_initialize (NULL);
  bounds = BITMAP_ALLOC (NULL);

  /* Collect positions of all bound arguments of the call.  */
  for (i = 0; i < gimple_call_num_args (call); i++)
    if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
      bitmap_set_bit (bounds, i);

  /* If any bound args were found, build a copy of the call without
     them; the with-bounds flag is cleared unconditionally.  */
  if (!bitmap_empty_p (bounds))
    call = gimple_call_copy_skip_args (call, bounds);
  gimple_call_set_with_bounds (call, false);

  BITMAP_FREE (bounds);
  bitmap_obstack_release (NULL);

  return call;
}
531
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */

bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  /* The callee counts as instrumented if it is an instrumentation
     clone or carries the "chkp instrumented" attribute.  */
  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  /* Case 1: instrumented callee but a call without bounds — retarget
     the edge back to the original (non-instrumented) function.  */
  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  /* Case 2: call with bounds to a non-instrumented callee, excluding
     the checker builtins that legitimately take bounds args.  */
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  return false;
}
570
571 /* Mark statement S to not be instrumented. */
572 static void
573 chkp_mark_stmt (gimple *s)
574 {
575 gimple_set_plf (s, GF_PLF_1, true);
576 }
577
578 /* Mark statement S to be instrumented. */
579 static void
580 chkp_unmark_stmt (gimple *s)
581 {
582 gimple_set_plf (s, GF_PLF_1, false);
583 }
584
585 /* Return 1 if statement S should not be instrumented. */
586 static bool
587 chkp_marked_stmt_p (gimple *s)
588 {
589 return gimple_plf (s, GF_PLF_1);
590 }
591
592 /* Get var to be used for bound temps. */
593 static tree
594 chkp_get_tmp_var (void)
595 {
596 if (!tmp_var)
597 tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
598
599 return tmp_var;
600 }
601
602 /* Get SSA_NAME to be used as temp. */
603 static tree
604 chkp_get_tmp_reg (gimple *stmt)
605 {
606 if (in_chkp_pass)
607 return make_ssa_name (chkp_get_tmp_var (), stmt);
608
609 return make_temp_ssa_name (pointer_bounds_type_node, stmt,
610 CHKP_BOUND_TMP_NAME);
611 }
612
613 /* Get var to be used for size temps. */
614 static tree
615 chkp_get_size_tmp_var (void)
616 {
617 if (!size_tmp_var)
618 size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
619
620 return size_tmp_var;
621 }
622
623 /* Register bounds BND for address of OBJ. */
624 static void
625 chkp_register_addr_bounds (tree obj, tree bnd)
626 {
627 if (bnd == incomplete_bounds)
628 return;
629
630 chkp_reg_addr_bounds->put (obj, bnd);
631
632 if (dump_file && (dump_flags & TDF_DETAILS))
633 {
634 fprintf (dump_file, "Regsitered bound ");
635 print_generic_expr (dump_file, bnd, 0);
636 fprintf (dump_file, " for address of ");
637 print_generic_expr (dump_file, obj, 0);
638 fprintf (dump_file, "\n");
639 }
640 }
641
642 /* Return bounds registered for address of OBJ. */
643 static tree
644 chkp_get_registered_addr_bounds (tree obj)
645 {
646 tree *slot = chkp_reg_addr_bounds->get (obj);
647 return slot ? *slot : NULL_TREE;
648 }
649
650 /* Mark BOUNDS as completed. */
651 static void
652 chkp_mark_completed_bounds (tree bounds)
653 {
654 chkp_completed_bounds_set->add (bounds);
655
656 if (dump_file && (dump_flags & TDF_DETAILS))
657 {
658 fprintf (dump_file, "Marked bounds ");
659 print_generic_expr (dump_file, bounds, 0);
660 fprintf (dump_file, " as completed\n");
661 }
662 }
663
664 /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
665 static bool
666 chkp_completed_bounds (tree bounds)
667 {
668 return chkp_completed_bounds_set->contains (bounds);
669 }
670
671 /* Clear comleted bound marks. */
672 static void
673 chkp_erase_completed_bounds (void)
674 {
675 delete chkp_completed_bounds_set;
676 chkp_completed_bounds_set = new hash_set<tree>;
677 }
678
679 /* Mark BOUNDS associated with PTR as incomplete. */
680 static void
681 chkp_register_incomplete_bounds (tree bounds, tree ptr)
682 {
683 chkp_incomplete_bounds_map->put (bounds, ptr);
684
685 if (dump_file && (dump_flags & TDF_DETAILS))
686 {
687 fprintf (dump_file, "Regsitered incomplete bounds ");
688 print_generic_expr (dump_file, bounds, 0);
689 fprintf (dump_file, " for ");
690 print_generic_expr (dump_file, ptr, 0);
691 fprintf (dump_file, "\n");
692 }
693 }
694
695 /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
696 static bool
697 chkp_incomplete_bounds (tree bounds)
698 {
699 if (bounds == incomplete_bounds)
700 return true;
701
702 if (chkp_completed_bounds (bounds))
703 return false;
704
705 return chkp_incomplete_bounds_map->get (bounds) != NULL;
706 }
707
708 /* Clear incomleted bound marks. */
709 static void
710 chkp_erase_incomplete_bounds (void)
711 {
712 delete chkp_incomplete_bounds_map;
713 chkp_incomplete_bounds_map = new hash_map<tree, tree>;
714 }
715
716 /* Build and return bndmk call which creates bounds for structure
717 pointed by PTR. Structure should have complete type. */
718 tree
719 chkp_make_bounds_for_struct_addr (tree ptr)
720 {
721 tree type = TREE_TYPE (ptr);
722 tree size;
723
724 gcc_assert (POINTER_TYPE_P (type));
725
726 size = TYPE_SIZE (TREE_TYPE (type));
727
728 gcc_assert (size);
729
730 return build_call_nary (pointer_bounds_type_node,
731 build_fold_addr_expr (chkp_bndmk_fndecl),
732 2, ptr, size);
733 }
734
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  Returning false stops
	     the hash_map traversal.  */
	  return false;
	}
    }

  /* All args of this phi are known; keep traversing.  */
  return true;
}
765
766 /* Return 1 if all phi nodes created for bounds have their
767 arguments computed. */
768 static bool
769 chkp_may_finish_incomplete_bounds (void)
770 {
771 bool res = true;
772
773 chkp_incomplete_bounds_map
774 ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);
775
776 return res;
777 }
778
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  /* BOUNDS were created for pointer PTR; both are defined by phi
     nodes whose incoming edges correspond position-by-position.  */
  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* For each incoming pointer value compute its bounds and append
     them as the matching argument of the bounds phi.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  /* Always continue the traversal.  */
  return true;
}
808
809 /* Mark BOUNDS as invalid. */
810 static void
811 chkp_mark_invalid_bounds (tree bounds)
812 {
813 chkp_invalid_bounds->add (bounds);
814
815 if (dump_file && (dump_flags & TDF_DETAILS))
816 {
817 fprintf (dump_file, "Marked bounds ");
818 print_generic_expr (dump_file, bounds, 0);
819 fprintf (dump_file, " as invalid\n");
820 }
821 }
822
823 /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
824 static bool
825 chkp_valid_bounds (tree bounds)
826 {
827 if (bounds == zero_bounds || bounds == none_bounds)
828 return false;
829
830 return !chkp_invalid_bounds->contains (bounds);
831 }
832
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple *phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Already resolved on an earlier fixpoint iteration.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      /* One valid, fully-computed argument is enough: mark the
	 result completed, recompute all args, and report progress
	 via RES so the caller iterates again.  */
      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  /* Keep traversing even when nothing was found for this node.  */
  return true;
}
871
872 /* Helper function for chkp_finish_incomplete_bounds.
873 Marks all incompleted bounds as invalid. */
874 bool
875 chkp_mark_invalid_bounds_walker (tree const &bounds,
876 tree *slot ATTRIBUTE_UNUSED,
877 void *res ATTRIBUTE_UNUSED)
878 {
879 if (!chkp_completed_bounds (bounds))
880 {
881 chkp_mark_invalid_bounds (bounds);
882 chkp_mark_completed_bounds (bounds);
883 }
884 return true;
885 }
886
/* When all bound phi nodes have all their args computed
   we have enough info to find valid bounds.  We iterate
   through all incomplete bounds searching for valid
   bounds.  Found valid bounds are marked as completed
   and all remaining incomplete bounds are recomputed.
   Process continues until no new valid bounds may be
   found.  All remaining incomplete bounds are marked as
   invalid (i.e. have no valid source of bounds).  */
static void
chkp_finish_incomplete_bounds (void)
{
  bool found_valid = true;

  /* Fixpoint loop: each pass may validate bounds that enable
     validation of others on the next pass.  */
  while (found_valid)
    {
      found_valid = false;

      chkp_incomplete_bounds_map->
	traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);

      /* Progress was made — propagate newly valid bounds into all
	 phi args before searching again.  */
      if (found_valid)
	chkp_incomplete_bounds_map->
	  traverse<void *, chkp_recompute_phi_bounds> (NULL);
    }

  /* Whatever is still incomplete has no valid source: mark invalid,
     then recompute once more so phi args reflect that.  */
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
  chkp_incomplete_bounds_map->
    traverse<void *, chkp_recompute_phi_bounds> (NULL);

  chkp_erase_completed_bounds ();
  chkp_erase_incomplete_bounds ();
}
920
921 /* Return 1 if type TYPE is a pointer type or a
922 structure having a pointer type as one of its fields.
923 Otherwise return 0. */
924 bool
925 chkp_type_has_pointer (const_tree type)
926 {
927 bool res = false;
928
929 if (BOUNDED_TYPE_P (type))
930 res = true;
931 else if (RECORD_OR_UNION_TYPE_P (type))
932 {
933 tree field;
934
935 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
936 if (TREE_CODE (field) == FIELD_DECL)
937 res = res || chkp_type_has_pointer (TREE_TYPE (field));
938 }
939 else if (TREE_CODE (type) == ARRAY_TYPE)
940 res = chkp_type_has_pointer (TREE_TYPE (type));
941
942 return res;
943 }
944
945 unsigned
946 chkp_type_bounds_count (const_tree type)
947 {
948 unsigned res = 0;
949
950 if (!type)
951 res = 0;
952 else if (BOUNDED_TYPE_P (type))
953 res = 1;
954 else if (RECORD_OR_UNION_TYPE_P (type))
955 {
956 bitmap have_bound;
957
958 bitmap_obstack_initialize (NULL);
959 have_bound = BITMAP_ALLOC (NULL);
960 chkp_find_bound_slots (type, have_bound);
961 res = bitmap_count_bits (have_bound);
962 BITMAP_FREE (have_bound);
963 bitmap_obstack_release (NULL);
964 }
965
966 return res;
967 }
968
969 /* Get bounds associated with NODE via
970 chkp_set_bounds call. */
971 tree
972 chkp_get_bounds (tree node)
973 {
974 tree *slot;
975
976 if (!chkp_bounds_map)
977 return NULL_TREE;
978
979 slot = chkp_bounds_map->get (node);
980 return slot ? *slot : NULL_TREE;
981 }
982
983 /* Associate bounds VAL with NODE. */
984 void
985 chkp_set_bounds (tree node, tree val)
986 {
987 if (!chkp_bounds_map)
988 chkp_bounds_map = new hash_map<tree, tree>;
989
990 chkp_bounds_map->put (node, val);
991 }
992
/* Check if statically initialized variable VAR requires
   static bounds initialization.  If VAR is added into the
   bounds initialization list then 1 is returned.  Otherwise
   return 0.  */
extern bool
chkp_register_var_initializer (tree var)
{
  /* Nothing to register when the checker is disabled or the
     initializer is erroneous.  */
  if (!flag_check_pointer_bounds
      || DECL_INITIAL (var) == error_mark_node)
    return false;

  gcc_assert (TREE_CODE (var) == VAR_DECL);
  gcc_assert (DECL_INITIAL (var));

  /* Only static variables whose type contains pointers need their
     bounds initialized by a checker constructor.  */
  if (TREE_STATIC (var)
      && chkp_type_has_pointer (TREE_TYPE (var)))
    {
      varpool_node::get_create (var)->need_bounds_init = 1;
      return true;
    }

  return false;
}
1016
1017 /* Helper function for chkp_finish_file.
1018
1019 Add new modification statement (RHS is assigned to LHS)
1020 into list of static initializer statementes (passed in ARG).
1021 If statements list becomes too big, emit checker constructor
1022 and start the new one. */
1023 static void
1024 chkp_add_modification_to_stmt_list (tree lhs,
1025 tree rhs,
1026 void *arg)
1027 {
1028 struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
1029 tree modify;
1030
1031 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1032 rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1033
1034 modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1035 append_to_statement_list (modify, &stmts->stmts);
1036
1037 stmts->avail--;
1038 }
1039
1040 /* Build and return ADDR_EXPR for specified object OBJ. */
1041 static tree
1042 chkp_build_addr_expr (tree obj)
1043 {
1044 return TREE_CODE (obj) == TARGET_MEM_REF
1045 ? tree_mem_ref_addr (ptr_type_node, obj)
1046 : build_fold_addr_expr (obj);
1047 }
1048
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  /* LB is always VAR's address; SIZE is computed so that
     UB = LB + SIZE is the address of VAR's last byte.  */
  if (TREE_CODE (var) == STRING_CST)
    {
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* TREE_STRING_LENGTH counts the terminating NUL, hence -1 for
	 the last-byte offset.  */
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      /* Size is obtained at run time via the checker sizeof builtin.  */
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* A zero dynamic size means "unknown": substitute the
	     maximal size so the bounds are effectively infinite.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Budget exhausted: emit the accumulated constructor and start a
     fresh statement list (see MAX_STMTS_IN_STATIC_CHKP_CTOR).  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
1106
/* Return entry block to be used for checker initialization code.
   Create new block if required.  */
static basic_block
chkp_get_entry_block (void)
{
  /* Lazily split a fresh block off the function entry (after any
     labels) and cache it for subsequent calls.  */
  if (!entry_block)
    entry_block
      = split_block_after_labels (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest;

  return entry_block;
}
1118
1119 /* Return a bounds var to be used for pointer var PTR_VAR. */
1120 static tree
1121 chkp_get_bounds_var (tree ptr_var)
1122 {
1123 tree bnd_var;
1124 tree *slot;
1125
1126 slot = chkp_bound_vars->get (ptr_var);
1127 if (slot)
1128 bnd_var = *slot;
1129 else
1130 {
1131 bnd_var = create_tmp_reg (pointer_bounds_type_node,
1132 CHKP_BOUND_TMP_NAME);
1133 chkp_bound_vars->put (ptr_var, bnd_var);
1134 }
1135
1136 return bnd_var;
1137 }
1138
1139 /* If BND is an abnormal bounds copy, return a copied value.
1140 Otherwise return BND. */
1141 static tree
1142 chkp_get_orginal_bounds_for_abnormal_copy (tree bnd)
1143 {
1144 if (bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1145 {
1146 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1147 gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
1148 bnd = gimple_assign_rhs1 (bnd_def);
1149 }
1150
1151 return bnd;
1152 }
1153
1154 /* Register bounds BND for object PTR in global bounds table.
1155 A copy of bounds may be created for abnormal ssa names.
1156 Returns bounds to use for PTR. */
1157 static tree
1158 chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
1159 {
1160 bool abnormal_ptr;
1161
1162 if (!chkp_reg_bounds)
1163 return bnd;
1164
1165 /* Do nothing if bounds are incomplete_bounds
1166 because it means bounds will be recomputed. */
1167 if (bnd == incomplete_bounds)
1168 return bnd;
1169
1170 abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
1171 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1172 && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
1173
1174 /* A single bounds value may be reused multiple times for
1175 different pointer values. It may cause coalescing issues
1176 for abnormal SSA names. To avoid it we create a bounds
1177 copy in case it is computed for abnormal SSA name.
1178
1179 We also cannot reuse such created copies for other pointers */
1180 if (abnormal_ptr
1181 || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
1182 {
1183 tree bnd_var = NULL_TREE;
1184
1185 if (abnormal_ptr)
1186 {
1187 if (SSA_NAME_VAR (ptr))
1188 bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
1189 }
1190 else
1191 bnd_var = chkp_get_tmp_var ();
1192
1193 /* For abnormal copies we may just find original
1194 bounds and use them. */
1195 if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
1196 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1197 /* For undefined values we usually use none bounds
1198 value but in case of abnormal edge it may cause
1199 coalescing failures. Use default definition of
1200 bounds variable instead to avoid it. */
1201 else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
1202 && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
1203 {
1204 bnd = get_or_create_ssa_default_def (cfun, bnd_var);
1205
1206 if (dump_file && (dump_flags & TDF_DETAILS))
1207 {
1208 fprintf (dump_file, "Using default def bounds ");
1209 print_generic_expr (dump_file, bnd, 0);
1210 fprintf (dump_file, " for abnormal default def SSA name ");
1211 print_generic_expr (dump_file, ptr, 0);
1212 fprintf (dump_file, "\n");
1213 }
1214 }
1215 else
1216 {
1217 tree copy;
1218 gimple *def = SSA_NAME_DEF_STMT (ptr);
1219 gimple *assign;
1220 gimple_stmt_iterator gsi;
1221
1222 if (bnd_var)
1223 copy = make_ssa_name (bnd_var);
1224 else
1225 copy = make_temp_ssa_name (pointer_bounds_type_node,
1226 NULL,
1227 CHKP_BOUND_TMP_NAME);
1228 bnd = chkp_get_orginal_bounds_for_abnormal_copy (bnd);
1229 assign = gimple_build_assign (copy, bnd);
1230
1231 if (dump_file && (dump_flags & TDF_DETAILS))
1232 {
1233 fprintf (dump_file, "Creating a copy of bounds ");
1234 print_generic_expr (dump_file, bnd, 0);
1235 fprintf (dump_file, " for abnormal SSA name ");
1236 print_generic_expr (dump_file, ptr, 0);
1237 fprintf (dump_file, "\n");
1238 }
1239
1240 if (gimple_code (def) == GIMPLE_NOP)
1241 {
1242 gsi = gsi_last_bb (chkp_get_entry_block ());
1243 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
1244 gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
1245 else
1246 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1247 }
1248 else
1249 {
1250 gimple *bnd_def = SSA_NAME_DEF_STMT (bnd);
1251 /* Sometimes (e.g. when we load a pointer from a
1252 memory) bounds are produced later than a pointer.
1253 We need to insert bounds copy appropriately. */
1254 if (gimple_code (bnd_def) != GIMPLE_NOP
1255 && stmt_dominates_stmt_p (def, bnd_def))
1256 gsi = gsi_for_stmt (bnd_def);
1257 else
1258 gsi = gsi_for_stmt (def);
1259 gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
1260 }
1261
1262 bnd = copy;
1263 }
1264
1265 if (abnormal_ptr)
1266 bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
1267 }
1268
1269 chkp_reg_bounds->put (ptr, bnd);
1270
1271 if (dump_file && (dump_flags & TDF_DETAILS))
1272 {
1273 fprintf (dump_file, "Regsitered bound ");
1274 print_generic_expr (dump_file, bnd, 0);
1275 fprintf (dump_file, " for pointer ");
1276 print_generic_expr (dump_file, ptr, 0);
1277 fprintf (dump_file, "\n");
1278 }
1279
1280 return bnd;
1281 }
1282
1283 /* Get bounds registered for object PTR in global bounds table. */
1284 static tree
1285 chkp_get_registered_bounds (tree ptr)
1286 {
1287 tree *slot;
1288
1289 if (!chkp_reg_bounds)
1290 return NULL_TREE;
1291
1292 slot = chkp_reg_bounds->get (ptr);
1293 return slot ? *slot : NULL_TREE;
1294 }
1295
1296 /* Add bound retvals to return statement pointed by GSI. */
1297
1298 static void
1299 chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
1300 {
1301 greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
1302 tree retval = gimple_return_retval (ret);
1303 tree ret_decl = DECL_RESULT (cfun->decl);
1304 tree bounds;
1305
1306 if (!retval)
1307 return;
1308
1309 if (BOUNDED_P (ret_decl))
1310 {
1311 bounds = chkp_find_bounds (retval, gsi);
1312 bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
1313 gimple_return_set_retbnd (ret, bounds);
1314 }
1315
1316 update_stmt (ret);
1317 }
1318
1319 /* Force OP to be suitable for using as an argument for call.
1320 New statements (if any) go to SEQ. */
1321 static tree
1322 chkp_force_gimple_call_op (tree op, gimple_seq *seq)
1323 {
1324 gimple_seq stmts;
1325 gimple_stmt_iterator si;
1326
1327 op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
1328
1329 for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
1330 chkp_mark_stmt (gsi_stmt (si));
1331
1332 gimple_seq_add_seq (seq, stmts);
1333
1334 return op;
1335 }
1336
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node means read, integer_one_node means write).  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Skip checks against zero bounds in non-instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor -f[no-]chkp-check-read and -f[no-]chkp-check-write.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Gimplify ADDR so it is a valid call argument; any helper
     statements are accumulated in SEQ.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the lower-bound check call and append it to SEQ.  */
  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  /* Insert the whole sequence right before the checked statement.  */
  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1383
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store
   (integer_zero_node means read, integer_one_node means write).  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple *check;
  tree node;

  /* Skip checks against zero bounds in non-instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* Honor -f[no-]chkp-check-read and -f[no-]chkp-check-write.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Gimplify ADDR so it is a valid call argument; any helper
     statements are accumulated in SEQ.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  /* Build the upper-bound check call and append it to SEQ.  */
  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  /* Insert the whole sequence right before the checked statement.  */
  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple *before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, "  ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
1430
/* Generate lower and upper bound checks for memory access
   to memory slot [FIRST, LAST] against BOUNDS.  Checks
   are inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
void
chkp_check_mem_access (tree first, tree last, tree bounds,
		       gimple_stmt_iterator iter,
		       location_t location,
		       tree dirflag)
{
  /* The lower bound is checked against the first accessed byte and
     the upper bound against the last one.  */
  chkp_check_lower (first, bounds, iter, location, dirflag);
  chkp_check_upper (last, bounds, iter, location, dirflag);
}
1444
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */

void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  /* Remember an iterator for the original call before bounds search
     and check generation insert new statements around *GSI.  */
  gimple_stmt_iterator call_iter = *gsi;
  gimple *call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  /* CHECK_PTR_LBOUNDS and CHECK_PTR_BOUNDS both imply a lower bound
     check of ADDR.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* CHECK_PTR_UBOUNDS checks the upper bound of ADDR itself.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* CHECK_PTR_BOUNDS checks the upper bound of the last byte of the
     accessed range [ADDR, ADDR + SIZE).  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  /* Finally drop the user builtin call itself.  */
  gsi_remove (&call_iter, true);
}
1476
1477 /* Replace call to _bnd_get_ptr_* pointed by GSI with
1478 corresponding bounds extract call. */
1479
1480 void
1481 chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
1482 {
1483 gimple *call = gsi_stmt (*gsi);
1484 tree fndecl = gimple_call_fndecl (call);
1485 tree addr = gimple_call_arg (call, 0);
1486 tree bounds = chkp_find_bounds (addr, gsi);
1487 gimple *extract;
1488
1489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
1490 fndecl = chkp_extract_lower_fndecl;
1491 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
1492 fndecl = chkp_extract_upper_fndecl;
1493 else
1494 gcc_unreachable ();
1495
1496 extract = gimple_build_call (fndecl, 1, bounds);
1497 gimple_call_set_lhs (extract, gimple_call_lhs (call));
1498 chkp_mark_stmt (extract);
1499
1500 gsi_replace (gsi, extract, false);
1501 }
1502
1503 /* Return COMPONENT_REF accessing FIELD in OBJ. */
1504 static tree
1505 chkp_build_component_ref (tree obj, tree field)
1506 {
1507 tree res;
1508
1509 /* If object is TMR then we do not use component_ref but
1510 add offset instead. We need it to be able to get addr
1511 of the reasult later. */
1512 if (TREE_CODE (obj) == TARGET_MEM_REF)
1513 {
1514 tree offs = TMR_OFFSET (obj);
1515 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1516 offs, DECL_FIELD_OFFSET (field));
1517
1518 gcc_assert (offs);
1519
1520 res = copy_node (obj);
1521 TREE_TYPE (res) = TREE_TYPE (field);
1522 TMR_OFFSET (res) = offs;
1523 }
1524 else
1525 res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
1526
1527 return res;
1528 }
1529
1530 /* Return ARRAY_REF for array ARR and index IDX with
1531 specified element type ETYPE and element size ESIZE. */
1532 static tree
1533 chkp_build_array_ref (tree arr, tree etype, tree esize,
1534 unsigned HOST_WIDE_INT idx)
1535 {
1536 tree index = build_int_cst (size_type_node, idx);
1537 tree res;
1538
1539 /* If object is TMR then we do not use array_ref but
1540 add offset instead. We need it to be able to get addr
1541 of the reasult later. */
1542 if (TREE_CODE (arr) == TARGET_MEM_REF)
1543 {
1544 tree offs = TMR_OFFSET (arr);
1545
1546 esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
1547 esize, index);
1548 gcc_assert(esize);
1549
1550 offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
1551 offs, esize);
1552 gcc_assert (offs);
1553
1554 res = copy_node (arr);
1555 TREE_TYPE (res) = etype;
1556 TMR_OFFSET (res) = offs;
1557 }
1558 else
1559 res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
1560
1561 return res;
1562 }
1563
/* Helper function for chkp_add_bounds_to_call_stmt.
   Fill ALL_BOUNDS output array with created bounds.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   ITER points a position where bounds are searched.

   ALL_BOUNDS[i] is filled with elem bounds if there
   is a field in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE in bits.  */
static void
chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
			   HOST_WIDE_INT offs,
			   gimple_stmt_iterator *iter)
{
  tree type = TREE_TYPE (elem);

  if (BOUNDED_TYPE_P (type))
    {
      /* A pointer element: compute its bounds once and store them
	 in the slot corresponding to its bit offset.  */
      if (!all_bounds[offs / POINTER_SIZE])
	{
	  /* Load the element into a temporary SSA name first so
	     bounds can be searched for a register value.  */
	  tree temp = make_temp_ssa_name (type, NULL, "");
	  gimple *assign = gimple_build_assign (temp, elem);
	  gimple_stmt_iterator gsi;

	  gsi_insert_before (iter, assign, GSI_SAME_STMT);
	  gsi = gsi_for_stmt (assign);

	  all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
	}
    }
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its accumulated bit offset.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    tree base = unshare_expr (elem);
	    tree field_ref = chkp_build_component_ref (base, field);
	    /* Total bit offset = bit offset within unit
	       + byte offset of the unit scaled to bits.  */
	    HOST_WIDE_INT field_offs
	      = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;

	    chkp_find_bounds_for_elem (field_ref, all_bounds,
				       offs + field_offs, iter);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Nothing to do for arrays without a usable upper bound
	 (e.g. flexible array members).  */
      if (!maxval || integer_minus_onep (maxval))
	return;

      /* Recurse into each array element at its bit offset.  */
      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	{
	  tree base = unshare_expr (elem);
	  tree arr_elem = chkp_build_array_ref (base, etype,
						TYPE_SIZE (etype),
						cur);
	  chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
				     iter);
	}
    }
}
1635
/* Fill HAVE_BOUND output bitmap with information about
   bounds required for object of type TYPE.

   OFFS is used for recursive calls and holds basic
   offset of TYPE in outer structure in bits.

   HAVE_BOUND[i] is set to 1 if there is a field
   in TYPE which has pointer type and offset
   equal to i * POINTER_SIZE - OFFS in bits.  */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
			 HOST_WIDE_INT offs)
{
  if (BOUNDED_TYPE_P (type))
    /* A pointer type occupies a single bound slot at the
       current offset.  */
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      /* Recurse into each field at its accumulated bit offset
	 (bit offset within unit + byte offset scaled to bits).  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    HOST_WIDE_INT field_offs = 0;
	    if (DECL_FIELD_BIT_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	    if (DECL_FIELD_OFFSET (field))
	      field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
	    chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
				     offs + field_offs);
	  }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      /* The object type is an array of complete type, i.e., other
	 than a flexible array.  */
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Skip arrays without a constant, non-negative upper bound.  */
      if (!maxval
	  || TREE_CODE (maxval) != INTEGER_CST
	  || integer_minus_onep (maxval))
	return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
1685
/* Fill bitmap RES with information about bounds for
   type TYPE.  See chkp_find_bound_slots_1 for more
   details.  */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start with an empty bitmap and a zero base offset.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
1695
1696 /* Return 1 if call to FNDECL should be instrumented
1697 and 0 otherwise. */
1698
1699 static bool
1700 chkp_instrument_normal_builtin (tree fndecl)
1701 {
1702 switch (DECL_FUNCTION_CODE (fndecl))
1703 {
1704 case BUILT_IN_STRLEN:
1705 case BUILT_IN_STRCPY:
1706 case BUILT_IN_STRNCPY:
1707 case BUILT_IN_STPCPY:
1708 case BUILT_IN_STPNCPY:
1709 case BUILT_IN_STRCAT:
1710 case BUILT_IN_STRNCAT:
1711 case BUILT_IN_MEMCPY:
1712 case BUILT_IN_MEMPCPY:
1713 case BUILT_IN_MEMSET:
1714 case BUILT_IN_MEMMOVE:
1715 case BUILT_IN_BZERO:
1716 case BUILT_IN_STRCMP:
1717 case BUILT_IN_STRNCMP:
1718 case BUILT_IN_BCMP:
1719 case BUILT_IN_MEMCMP:
1720 case BUILT_IN_MEMCPY_CHK:
1721 case BUILT_IN_MEMPCPY_CHK:
1722 case BUILT_IN_MEMMOVE_CHK:
1723 case BUILT_IN_MEMSET_CHK:
1724 case BUILT_IN_STRCPY_CHK:
1725 case BUILT_IN_STRNCPY_CHK:
1726 case BUILT_IN_STPCPY_CHK:
1727 case BUILT_IN_STPNCPY_CHK:
1728 case BUILT_IN_STRCAT_CHK:
1729 case BUILT_IN_STRNCAT_CHK:
1730 case BUILT_IN_MALLOC:
1731 case BUILT_IN_CALLOC:
1732 case BUILT_IN_REALLOC:
1733 return 1;
1734
1735 default:
1736 return 0;
1737 }
1738 }
1739
/* Add bound arguments to call statement pointed by GSI.
   Also performs a replacement of user checker builtins calls
   with internal ones.  */

static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
	  || !gimple_has_body_p (clone->decl))
	return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl
      && DECL_ARGUMENTS (fndecl)
      && gimple_call_fntype (call) == TREE_TYPE (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  Each actual argument is pushed,
     optionally followed by the bounds it requires.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
	 or actual argument type.  ARG walks either a TREE_LIST of
	 types (use_fntype) or a chain of PARM_DECLs; once it runs
	 out (varargs), fall back to the actual argument type.  */
      if (arg)
	if (use_fntype)
	  if (TREE_VALUE (arg) != void_type_node)
	    {
	      type = TREE_VALUE (arg);
	      arg = TREE_CHAIN (arg);
	    }
	  else
	    type = TREE_TYPE (call_arg);
	else
	  {
	    type = TREE_TYPE (arg);
	    arg = TREE_CHAIN (arg);
	  }
      else
	type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      /* A pointer argument gets a single bounds argument; an aggregate
	 containing pointers gets one bounds argument per pointer
	 slot found in it.  */
      if (BOUNDED_TYPE_P (type)
	  || pass_by_reference (NULL, TYPE_MODE (type), type, true))
	new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
	{
	  HOST_WIDE_INT max_bounds
	    = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
	  tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
	  HOST_WIDE_INT bnd_no;

	  memset (all_bounds, 0, sizeof (tree) * max_bounds);

	  chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

	  for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
	    if (all_bounds[bnd_no])
	      new_args.safe_push (all_bounds[bnd_no]);

	  free (all_bounds);
	}
    }

  /* If no bounds were added, the original call can be kept;
     otherwise build a new call with the extended argument list.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      /* In case of a type cast we should modify used function
	 type instead of using type of new fndecl.  */
      if (gimple_call_fntype (call) != TREE_TYPE (fndecl))
	{
	  tree type = gimple_call_fntype (call);
	  type = chkp_copy_function_type_adding_bounds (type);
	  gimple_call_set_fntype (new_call, type);
	}
      else
	gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  */
  if (call != new_call)
    {
      /* Re-point SSA defs of the old call to the new statement
	 before the replacement.  */
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
	{
	  SSA_NAME_DEF_STMT (op) = new_call;
	}
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
1968
/* Return constant static bounds var with specified bounds LB and UB.
   If such var does not exists then new var is created with specified NAME.  */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
			       HOST_WIDE_INT ub,
			       const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  /* Build a public static decl of pointer-bounds type first;
     its assembler name is used for the varpool lookup below.  */
  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
		    pointer_bounds_type_node);
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
	  || !POINTER_BOUNDS_P (snode->decl))
	sorry ("-fcheck-pointer-bounds requires '%s' "
	       "name for internal usage",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  /* No pre-existing symbol - finish the new decl: read-only,
     artificial, initialized with the target's bounds constant, and
     emitted as a one-only (comdat) symbol.  */
  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
2017
/* Generate code to make bounds with specified lower bound LB and SIZE.
   if AFTER is 1 then code is inserted after position pointed by ITER
   otherwise code is inserted before position pointed by ITER.
   If ITER is NULL then code is added to entry block.  */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  tree bounds;

  /* Choose an insertion point: the caller's iterator or the start
     of the checker entry block.  */
  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* Both operands must be valid GIMPLE call arguments; helper
     statements go into SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  /* Build the bounds-creation call and give its result a fresh
     temporary register.  */
  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER only matters when the caller supplied an iterator.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
	{
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
	}
      else
	fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
2070
2071 /* Return var holding zero bounds. */
2072 tree
2073 chkp_get_zero_bounds_var (void)
2074 {
2075 if (!chkp_zero_bounds_var)
2076 chkp_zero_bounds_var
2077 = chkp_make_static_const_bounds (0, -1,
2078 CHKP_ZERO_BOUNDS_VAR_NAME);
2079 return chkp_zero_bounds_var;
2080 }
2081
2082 /* Return var holding none bounds. */
2083 tree
2084 chkp_get_none_bounds_var (void)
2085 {
2086 if (!chkp_none_bounds_var)
2087 chkp_none_bounds_var
2088 = chkp_make_static_const_bounds (-1, 0,
2089 CHKP_NONE_BOUNDS_VAR_NAME);
2090 return chkp_none_bounds_var;
2091 }
2092
/* Return SSA_NAME used to represent zero bounds.  */
static tree
chkp_get_zero_bounds (void)
{
  /* Reuse the cached value when it was already created.  */
  if (zero_bounds)
    return zero_bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Creating zero bounds...");

  /* When static constant bounds are enabled, load the global zero
     bounds variable in the entry block; otherwise emit a bndmk call
     with zero base and zero size.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      zero_bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    zero_bounds = chkp_make_bounds (integer_zero_node,
				    integer_zero_node,
				    NULL,
				    false);

  return zero_bounds;
}
2121
2122 /* Return SSA_NAME used to represent none bounds. */
2123 static tree
2124 chkp_get_none_bounds (void)
2125 {
2126 if (none_bounds)
2127 return none_bounds;
2128
2129 if (dump_file && (dump_flags & TDF_DETAILS))
2130 fprintf (dump_file, "Creating none bounds...");
2131
2132
2133 if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
2134 || flag_chkp_use_static_const_bounds > 0)
2135 {
2136 gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
2137 gimple *stmt;
2138
2139 none_bounds = chkp_get_tmp_reg (NULL);
2140 stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
2141 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
2142 }
2143 else
2144 none_bounds = chkp_make_bounds (integer_minus_one_node,
2145 build_int_cst (size_type_node, 2),
2146 NULL,
2147 false);
2148
2149 return none_bounds;
2150 }
2151
2152 /* Return bounds to be used as a result of operation which
2153 should not create poiunter (e.g. MULT_EXPR). */
2154 static tree
2155 chkp_get_invalid_op_bounds (void)
2156 {
2157 return chkp_get_zero_bounds ();
2158 }
2159
2160 /* Return bounds to be used for loads of non-pointer values. */
2161 static tree
2162 chkp_get_nonpointer_load_bounds (void)
2163 {
2164 return chkp_get_zero_bounds ();
2165 }
2166
/* Return true if we may use a bndret call to get bounds for the
   pointer returned by CALL.  */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Among internal functions only IFN_VA_ARG is expected to
     produce a pointer with bounds.  */
  if (gimple_call_internal_p (call))
    {
      if (gimple_call_internal_fn (call) == IFN_VA_ARG)
	return true;
      return false;
    }

  /* Narrowing builtins produce bounds themselves.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Calls already carrying bounds provide bounds for the returned
     value as well.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Do not expect bounds from machine-dependent builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  /* Functions which may not be instrumented return no bounds.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      /* Normal builtins return bounds only when we instrument them.  */
      if (chkp_instrument_normal_builtin (fndecl))
	return true;

      /* An always_inline builtin may still get an instrumented clone
	 with a body; in that case bndret is usable.  */
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
	return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
2208
/* Build bounds returned by CALL and register them for the call's
   LHS.  Return the computed bounds.  */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple *stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
	   && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
	   && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
	{
	  /* RETARG indexes the uninstrumented argument list; skip
	     bounds arguments while mapping it onto this call.  */
	  for (argno = 0; argno < gimple_call_num_args (call); argno++)
	    if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
	      {
		if (retarg)
		  retarg--;
		else
		  break;
	      }
	}
      else
	argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
	 obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
				gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
2315
/* Return the retbnd call which consumes SSA name VAL (i.e. the call
   obtaining bounds for the value returned by the call that defined
   VAL), or NULL if there is none.  */
gcall *
chkp_retbnd_call_by_val (tree val)
{
  if (TREE_CODE (val) != SSA_NAME)
    return NULL;

  /* VAL is expected to be defined by a call statement.  */
  gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);

  /* Scan immediate uses of VAL for a call to chkp_ret_bnd_fndecl.  */
  imm_use_iterator use_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
    if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
	&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
      return as_a <gcall *> (USE_STMT (use_p));

  return NULL;
}
2335
2336 /* Check the next parameter for the given PARM is bounds
2337 and return it's default SSA_NAME (create if required). */
2338 static tree
2339 chkp_get_next_bounds_parm (tree parm)
2340 {
2341 tree bounds = TREE_CHAIN (parm);
2342 gcc_assert (POINTER_BOUNDS_P (bounds));
2343 bounds = ssa_default_def (cfun, bounds);
2344 if (!bounds)
2345 {
2346 bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
2347 set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
2348 }
2349 return bounds;
2350 }
2351
/* Return bounds to be used for input argument PARM, an SSA name
   whose underlying decl is a PARM_DECL.  The result is registered
   for PARM so later queries hit the cache.  */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Check caches for both the SSA name and the decl first.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
	 because currently we do not check dereferences
	 of this pointer.  */
      if (cfun->static_chain_decl == decl)
	bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
	 to use zero bounds for input arguments of main
	 function.  */
      else if (flag_chkp_zero_input_bounds_for_main
	       && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
			  "main") == 0)
	bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
	{
	  /* Instrumented functions receive bounds in the parameter
	     following the pointer parameter.  */
	  bounds = chkp_get_next_bounds_parm (decl);
	  bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Built arg bounds (");
	      print_generic_expr (dump_file, bounds, 0);
	      fprintf (dump_file, ") for arg: ");
	      print_node (dump_file, "", decl, 0);
	    }
	}
      else
	bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2415
/* Build and return CALL_EXPR for bndldx builtin with specified
   arguments.  */
tree
chkp_build_bndldx_call (tree addr, tree ptr)
{
  tree fn = build1 (ADDR_EXPR,
		    build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
		    chkp_bndldx_fndecl);
  tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
			       fn, 2, addr, ptr);
  /* Mark the call as a bounds-aware one.  */
  CALL_WITH_BOUNDS_P (call) = true;
  return call;
}
2429
2430 /* Insert code to load bounds for PTR located by ADDR.
2431 Code is inserted after position pointed by GSI.
2432 Loaded bounds are returned. */
2433 static tree
2434 chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
2435 {
2436 gimple_seq seq;
2437 gimple *stmt;
2438 tree bounds;
2439
2440 seq = NULL;
2441
2442 addr = chkp_force_gimple_call_op (addr, &seq);
2443 ptr = chkp_force_gimple_call_op (ptr, &seq);
2444
2445 stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
2446 chkp_mark_stmt (stmt);
2447 bounds = chkp_get_tmp_reg (stmt);
2448 gimple_call_set_lhs (stmt, bounds);
2449
2450 gimple_seq_add_stmt (&seq, stmt);
2451
2452 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2453
2454 if (dump_file && (dump_flags & TDF_DETAILS))
2455 {
2456 fprintf (dump_file, "Generated bndldx for pointer ");
2457 print_generic_expr (dump_file, ptr, 0);
2458 fprintf (dump_file, ": ");
2459 print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
2460 }
2461
2462 return bounds;
2463 }
2464
2465 /* Build and return CALL_EXPR for bndstx builtin with specified
2466 arguments. */
2467 tree
2468 chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
2469 {
2470 tree fn = build1 (ADDR_EXPR,
2471 build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
2472 chkp_bndstx_fndecl);
2473 tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
2474 fn, 3, ptr, bounds, addr);
2475 CALL_WITH_BOUNDS_P (call) = true;
2476 return call;
2477 }
2478
2479 /* Insert code to store BOUNDS for PTR stored by ADDR.
2480 New statements are inserted after position pointed
2481 by GSI. */
2482 void
2483 chkp_build_bndstx (tree addr, tree ptr, tree bounds,
2484 gimple_stmt_iterator *gsi)
2485 {
2486 gimple_seq seq;
2487 gimple *stmt;
2488
2489 seq = NULL;
2490
2491 addr = chkp_force_gimple_call_op (addr, &seq);
2492 ptr = chkp_force_gimple_call_op (ptr, &seq);
2493
2494 stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
2495 chkp_mark_stmt (stmt);
2496 gimple_call_set_with_bounds (stmt, true);
2497
2498 gimple_seq_add_stmt (&seq, stmt);
2499
2500 gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
2501
2502 if (dump_file && (dump_flags & TDF_DETAILS))
2503 {
2504 fprintf (dump_file, "Generated bndstx for pointer store ");
2505 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
2506 print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
2507 }
2508 }
2509
/* This function is called when call statement
   is inlined and therefore we can't use bndret
   for its LHS anymore.  Function fixes bndret
   call using new RHS value if possible.  */
void
chkp_fixup_inlined_call (tree lhs, tree rhs)
{
  tree addr, bounds;
  gcall *retbnd, *bndldx;

  if (!BOUNDED_P (lhs))
    return;

  /* Search for retbnd call.  */
  retbnd = chkp_retbnd_call_by_val (lhs);
  if (!retbnd)
    return;

  /* Currently only handle cases when call is replaced
     with a memory access.  In this case bndret call
     may be replaced with bndldx call.  Otherwise we
     have to search for bounds which may cause wrong
     result due to various optimizations applied.  */
  switch (TREE_CODE (rhs))
    {
    case VAR_DECL:
      /* Register vars have no memory slot to load bounds from.  */
      if (DECL_REGISTER (rhs))
	return;
      break;

    case MEM_REF:
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only handle references whose base is addressable memory.  */
      addr = get_base_address (rhs);
      if (!DECL_P (addr)
	  && TREE_CODE (addr) != MEM_REF)
	return;
      if (DECL_P (addr) && DECL_REGISTER (addr))
	return;
      break;

    default:
      return;
    }

  /* Create a new statements sequence with bndldx call.  */
  gimple_stmt_iterator gsi = gsi_for_stmt (retbnd);
  addr = build_fold_addr_expr (rhs);
  chkp_build_bndldx (addr, lhs, &gsi);
  /* chkp_build_bndldx leaves GSI at the inserted call.  */
  bndldx = as_a <gcall *> (gsi_stmt (gsi));

  /* Remove bndret call.  */
  bounds = gimple_call_lhs (retbnd);
  gsi = gsi_for_stmt (retbnd);
  gsi_remove (&gsi, true);

  /* Link new bndldx call.  */
  gimple_call_set_lhs (bndldx, bounds);
  update_stmt (bndldx);
}
2572
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple *assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);
  /* Operand whose bounds were reused; consulted at the end for the
     abnormal-SSA-name copy.  */
  tree base = NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      base = rhs1;
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();

	/* Remember which operand the bounds came from, if any.  */
	base = (bounds == bnd1) ? rhs1 : (bounds == bnd2) ? rhs2 : NULL;
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple *stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime using the same condition.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds of whichever operand wins the min/max.  */
	    gimple *stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  /* We may reuse bounds of other pointer we copy/modify.  But it is not
     allowed for abnormal ssa names.  If we produced a pointer using
     abnormal ssa name, we better make a bounds copy to avoid coalescing
     issues.  */
  if (base
      && TREE_CODE (base) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (base))
    {
      gimple *stmt = gimple_build_assign (chkp_get_tmp_reg (NULL), bounds);
      gsi_insert_after (&iter, stmt, GSI_SAME_STMT);
      bounds = gimple_assign_lhs (stmt);
    }

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
2795
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.

   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.

   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple *def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: dispatch on the kind of underlying decl.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    /* The referenced type must have a known non-zero size.  */
	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* Abnormal phis need a bounds var tied to the node's var to
	 avoid coalescing problems; otherwise a shared tmp is used.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    NULL,
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
2908
2909 /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
2910 tree
2911 chkp_build_make_bounds_call (tree lower_bound, tree size)
2912 {
2913 tree call = build1 (ADDR_EXPR,
2914 build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
2915 chkp_bndmk_fndecl);
2916 return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
2917 call, 2, lower_bound, size);
2918 }
2919
2920 /* Create static bounds var of specfified OBJ which is
2921 is either VAR_DECL or string constant. */
2922 static tree
2923 chkp_make_static_bounds (tree obj)
2924 {
2925 static int string_id = 1;
2926 static int var_id = 1;
2927 tree *slot;
2928 const char *var_name;
2929 char *bnd_var_name;
2930 tree bnd_var;
2931
2932 /* First check if we already have required var. */
2933 if (chkp_static_var_bounds)
2934 {
2935 /* For vars we use assembler name as a key in
2936 chkp_static_var_bounds map. It allows to
2937 avoid duplicating bound vars for decls
2938 sharing assembler name. */
2939 if (TREE_CODE (obj) == VAR_DECL)
2940 {
2941 tree name = DECL_ASSEMBLER_NAME (obj);
2942 slot = chkp_static_var_bounds->get (name);
2943 if (slot)
2944 return *slot;
2945 }
2946 else
2947 {
2948 slot = chkp_static_var_bounds->get (obj);
2949 if (slot)
2950 return *slot;
2951 }
2952 }
2953
2954 /* Build decl for bounds var. */
2955 if (TREE_CODE (obj) == VAR_DECL)
2956 {
2957 if (DECL_IGNORED_P (obj))
2958 {
2959 bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
2960 sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
2961 }
2962 else
2963 {
2964 var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
2965
2966 /* For hidden symbols we want to skip first '*' char. */
2967 if (*var_name == '*')
2968 var_name++;
2969
2970 bnd_var_name = (char *) xmalloc (strlen (var_name)
2971 + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
2972 strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
2973 strcat (bnd_var_name, var_name);
2974 }
2975
2976 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2977 get_identifier (bnd_var_name),
2978 pointer_bounds_type_node);
2979
2980 /* Address of the obj will be used as lower bound. */
2981 TREE_ADDRESSABLE (obj) = 1;
2982 }
2983 else
2984 {
2985 bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
2986 sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
2987
2988 bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
2989 get_identifier (bnd_var_name),
2990 pointer_bounds_type_node);
2991 }
2992
2993 free (bnd_var_name);
2994
2995 TREE_PUBLIC (bnd_var) = 0;
2996 TREE_USED (bnd_var) = 1;
2997 TREE_READONLY (bnd_var) = 0;
2998 TREE_STATIC (bnd_var) = 1;
2999 TREE_ADDRESSABLE (bnd_var) = 0;
3000 DECL_ARTIFICIAL (bnd_var) = 1;
3001 DECL_COMMON (bnd_var) = 1;
3002 DECL_COMDAT (bnd_var) = 1;
3003 DECL_READ_P (bnd_var) = 1;
3004 DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
3005 /* Force output similar to constant bounds.
3006 See chkp_make_static_const_bounds. */
3007 varpool_node::get_create (bnd_var)->force_output = 1;
3008 /* Mark symbol as requiring bounds initialization. */
3009 varpool_node::get_create (bnd_var)->need_bounds_init = 1;
3010 varpool_node::finalize_decl (bnd_var);
3011
3012 /* Add created var to the map to use it for other references
3013 to obj. */
3014 if (!chkp_static_var_bounds)
3015 chkp_static_var_bounds = new hash_map<tree, tree>;
3016
3017 if (TREE_CODE (obj) == VAR_DECL)
3018 {
3019 tree name = DECL_ASSEMBLER_NAME (obj);
3020 chkp_static_var_bounds->put (name, bnd_var);
3021 }
3022 else
3023 chkp_static_var_bounds->put (obj, bnd_var);
3024
3025 return bnd_var;
3026 }
3027
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple *stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Emit a call to the size builtin; its result lands in a tmp reg.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* The whole sequence goes to the function entry block.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
3088
3089 /* Return 1 if TYPE has fields with zero size or fields
3090 marked with chkp_variable_size attribute. */
3091 bool
3092 chkp_variable_size_type (tree type)
3093 {
3094 bool res = false;
3095 tree field;
3096
3097 if (RECORD_OR_UNION_TYPE_P (type))
3098 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3099 {
3100 if (TREE_CODE (field) == FIELD_DECL)
3101 res = res
3102 || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3103 || chkp_variable_size_type (TREE_TYPE (field));
3104 }
3105 else
3106 res = !TYPE_SIZE (type)
3107 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
3108 || tree_to_uhwi (TYPE_SIZE (type)) == 0;
3109
3110 return res;
3111 }
3112
/* Compute and return bounds for address of DECL which is
   one of VAR_DECL, PARM_DECL, RESULT_DECL.  */
static tree
chkp_get_bounds_for_decl_addr (tree decl)
{
  tree bounds;

  gcc_assert (TREE_CODE (decl) == VAR_DECL
	      || TREE_CODE (decl) == PARM_DECL
	      || TREE_CODE (decl) == RESULT_DECL);

  /* Reuse previously computed bounds if available.  */
  bounds = chkp_get_registered_addr_bounds (decl);

  if (bounds)
    return bounds;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Building bounds for address of decl ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, "\n");
    }

  /* Use zero bounds if size is unknown and checks for
     unknown sizes are restricted.  */
  if ((!DECL_SIZE (decl)
       || (chkp_variable_size_type (TREE_TYPE (decl))
	   && (TREE_STATIC (decl)
	       || DECL_EXTERNAL (decl)
	       || TREE_PUBLIC (decl))))
      && !flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (flag_chkp_use_static_bounds
      && TREE_CODE (decl) == VAR_DECL
      && (TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || TREE_PUBLIC (decl))
      && !DECL_THREAD_LOCAL_P (decl))
    {
      /* Load bounds from a statically allocated bounds var
	 at function entry.  */
      tree bnd_var = chkp_make_static_bounds (decl);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else if (!DECL_SIZE (decl)
	   || (chkp_variable_size_type (TREE_TYPE (decl))
	       && (TREE_STATIC (decl)
		   || DECL_EXTERNAL (decl)
		   || TREE_PUBLIC (decl))))
    {
      /* Size is unknown at compile time; compute it at runtime.  */
      gcc_assert (TREE_CODE (decl) == VAR_DECL);
      bounds = chkp_generate_extern_var_bounds (decl);
    }
  else
    {
      /* Common case: known size, make bounds directly.  */
      tree lb = chkp_build_addr_expr (decl);
      bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
    }

  return bounds;
}
3178
/* Compute and return bounds for constant string CST.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  /* Reuse previously computed bounds for this constant, if any.  */
  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  /* NOTE(review): the first clause looks redundant with the second
     unless flag_chkp_use_static_const_bounds may be negative (an
     "auto" tri-state) — confirm against the option definition.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically initialized bounds variable
	 at the function entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple *stmt;

      bounds = chkp_get_tmp_reg (NULL);
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Build bounds covering the whole string literal.  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
3216
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero (whole-address-space) bounds never constrain anything,
     so intersection with them returns the other operand.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple *stmt;
      tree bounds;

      seq = NULL;

      /* Emit a call to the intersection builtin; mark it so the
	 instrumentation pass does not process it again.  */
      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
3267
3268 /* Return 1 if we are allowed to narrow bounds for addressed FIELD
3269 and 0 othersize. */
3270 static bool
3271 chkp_may_narrow_to_field (tree field)
3272 {
3273 return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
3274 && tree_to_uhwi (DECL_SIZE (field)) != 0
3275 && (!DECL_FIELD_OFFSET (field)
3276 || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
3277 && (!DECL_FIELD_BIT_OFFSET (field)
3278 || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
3279 && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
3280 && !chkp_variable_size_type (TREE_TYPE (field));
3281 }
3282
3283 /* Return 1 if bounds for FIELD should be narrowed to
3284 field's own size. */
3285 static bool
3286 chkp_narrow_bounds_for_field (tree field)
3287 {
3288 HOST_WIDE_INT offs;
3289 HOST_WIDE_INT bit_offs;
3290
3291 if (!chkp_may_narrow_to_field (field))
3292 return false;
3293
3294 /* Accesse to compiler generated fields should not cause
3295 bounds narrowing. */
3296 if (DECL_ARTIFICIAL (field))
3297 return false;
3298
3299 offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
3300 bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
3301
3302 return (flag_chkp_narrow_bounds
3303 && (flag_chkp_first_field_has_own_bounds
3304 || offs
3305 || bit_offs));
3306 }
3307
3308 /* Perform narrowing for BOUNDS using bounds computed for field
3309 access COMPONENT. ITER meaning is the same as for
3310 chkp_intersect_bounds. */
3311 static tree
3312 chkp_narrow_bounds_to_field (tree bounds, tree component,
3313 gimple_stmt_iterator *iter)
3314 {
3315 tree field = TREE_OPERAND (component, 1);
3316 tree size = DECL_SIZE_UNIT (field);
3317 tree field_ptr = chkp_build_addr_expr (component);
3318 tree field_bounds;
3319
3320 field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
3321
3322 return chkp_intersect_bounds (field_bounds, bounds, iter);
3323 }
3324
/* Parse field or array access NODE.

   PTR output parameter holds a pointer to the outermost
   object.

   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.

   SAFE output parameter is set to 1 if access is safe and
   checks are not required.

   BOUNDS output parameter holds bounds to be used to check
   access (may be NULL).

   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all nodes to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
	 bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      /* Narrow to the component containing the leftmost
		 array and stop searching.  */
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      /* Narrow to each array-typed field on the way when
		 the innermost array is the narrowing target.  */
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
3466
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  /* Reuse previously registered bounds for this address, if any.  */
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* innermost_bounds is true, so the helper always returns
	   some bounds (possibly narrowed to the component).  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses are not checked; use zero bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* &*p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
3534
3535 /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
3536 to compute bounds if required. Computed bounds should be available at
3537 position pointed by ITER.
3538
3539 If PTR_SRC is NULL_TREE then pointer definition is identified.
3540
3541 If PTR_SRC is not NULL_TREE then ITER points to statements which loads
3542 PTR. If PTR is a any memory reference then ITER points to a statement
3543 after which bndldx will be inserterd. In both cases ITER will be updated
3544 to point to the inserted bndldx statement. */
3545
3546 static tree
3547 chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
3548 {
3549 tree addr = NULL_TREE;
3550 tree bounds = NULL_TREE;
3551
3552 if (!ptr_src)
3553 ptr_src = ptr;
3554
3555 bounds = chkp_get_registered_bounds (ptr_src);
3556
3557 if (bounds)
3558 return bounds;
3559
3560 switch (TREE_CODE (ptr_src))
3561 {
3562 case MEM_REF:
3563 case VAR_DECL:
3564 if (BOUNDED_P (ptr_src))
3565 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3566 bounds = chkp_get_zero_bounds ();
3567 else
3568 {
3569 addr = chkp_build_addr_expr (ptr_src);
3570 bounds = chkp_build_bndldx (addr, ptr, iter);
3571 }
3572 else
3573 bounds = chkp_get_nonpointer_load_bounds ();
3574 break;
3575
3576 case ARRAY_REF:
3577 case COMPONENT_REF:
3578 addr = get_base_address (ptr_src);
3579 if (DECL_P (addr)
3580 || TREE_CODE (addr) == MEM_REF
3581 || TREE_CODE (addr) == TARGET_MEM_REF)
3582 {
3583 if (BOUNDED_P (ptr_src))
3584 if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
3585 bounds = chkp_get_zero_bounds ();
3586 else
3587 {
3588 addr = chkp_build_addr_expr (ptr_src);
3589 bounds = chkp_build_bndldx (addr, ptr, iter);
3590 }
3591 else
3592 bounds = chkp_get_nonpointer_load_bounds ();
3593 }
3594 else
3595 {
3596 gcc_assert (TREE_CODE (addr) == SSA_NAME);
3597 bounds = chkp_find_bounds (addr, iter);
3598 }
3599 break;
3600
3601 case PARM_DECL:
3602 gcc_unreachable ();
3603 bounds = chkp_get_bound_for_parm (ptr_src);
3604 break;
3605
3606 case TARGET_MEM_REF:
3607 addr = chkp_build_addr_expr (ptr_src);
3608 bounds = chkp_build_bndldx (addr, ptr, iter);
3609 break;
3610
3611 case SSA_NAME:
3612 bounds = chkp_get_registered_bounds (ptr_src);
3613 if (!bounds)
3614 {
3615 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr_src);
3616 gphi_iterator phi_iter;
3617
3618 bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
3619
3620 gcc_assert (bounds);
3621
3622 if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
3623 {
3624 unsigned i;
3625
3626 for (i = 0; i < gimple_phi_num_args (def_phi); i++)
3627 {
3628 tree arg = gimple_phi_arg_def (def_phi, i);
3629 tree arg_bnd;
3630 gphi *phi_bnd;
3631
3632 arg_bnd = chkp_find_bounds (arg, NULL);
3633
3634 /* chkp_get_bounds_by_definition created new phi
3635 statement and phi_iter points to it.
3636
3637 Previous call to chkp_find_bounds could create
3638 new basic block and therefore change phi statement
3639 phi_iter points to. */
3640 phi_bnd = phi_iter.phi ();
3641
3642 add_phi_arg (phi_bnd, arg_bnd,
3643 gimple_phi_arg_edge (def_phi, i),
3644 UNKNOWN_LOCATION);
3645 }
3646
3647 /* If all bound phi nodes have their arg computed
3648 then we may finish its computation. See
3649 chkp_finish_incomplete_bounds for more details. */
3650 if (chkp_may_finish_incomplete_bounds ())
3651 chkp_finish_incomplete_bounds ();
3652 }
3653
3654 gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
3655 || chkp_incomplete_bounds (bounds));
3656 }
3657 break;
3658
3659 case ADDR_EXPR:
3660 case WITH_SIZE_EXPR:
3661 bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
3662 break;
3663
3664 case INTEGER_CST:
3665 if (integer_zerop (ptr_src))
3666 bounds = chkp_get_none_bounds ();
3667 else
3668 bounds = chkp_get_invalid_op_bounds ();
3669 break;
3670
3671 default:
3672 if (dump_file && (dump_flags & TDF_DETAILS))
3673 {
3674 fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
3675 get_tree_code_name (TREE_CODE (ptr_src)));
3676 print_node (dump_file, "", ptr_src, 0);
3677 }
3678 internal_error ("chkp_find_bounds: Unexpected tree code %s",
3679 get_tree_code_name (TREE_CODE (ptr_src)));
3680 }
3681
3682 if (!bounds)
3683 {
3684 if (dump_file && (dump_flags & TDF_DETAILS))
3685 {
3686 fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
3687 print_node (dump_file, "", ptr_src, 0);
3688 }
3689 internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
3690 }
3691
3692 return bounds;
3693 }
3694
/* Normal case for bounds search without forced narrowing.
   Thin wrapper over chkp_find_bounds_1 that lets the pointer
   definition be identified automatically (PTR_SRC == NULL_TREE).  */
static tree
chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
}
3701
/* Search bounds for pointer PTR loaded from PTR_SRC
   by statement *ITER points to.  Thin wrapper over
   chkp_find_bounds_1 with an explicit load source.  */
static tree
chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
{
  return chkp_find_bounds_1 (ptr, ptr_src, iter);
}
3709
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build its accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initialize bounds for copied object.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* LHS is itself a pointer; handle the copy directly.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  /* Recurse into each constructor element whose field type
	     contains pointers.  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Field-by-field copy: recurse into pointer-containing fields
	   of both sides.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* Designated range initializer: apply VAL to every
		     index in [lo_index, hi_index].  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* A single designated index restarts CUR; otherwise
		     elements are assigned sequentially.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
3808
3809 /* Add code to copy bounds for assignment of RHS to LHS.
3810 ARG is an iterator pointing ne code position. */
3811 static void
3812 chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
3813 {
3814 gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
3815 tree bounds = chkp_find_bounds (rhs, iter);
3816 tree addr = chkp_build_addr_expr(lhs);
3817
3818 chkp_build_bndstx (addr, rhs, bounds, iter);
3819 }
3820
/* Emit static bound initializers and size vars.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Flush into a constructor once the statement budget is
	   exhausted, then start a new list.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* Static bounds vars are initialized with the address of the
	   variable they describe.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
3888
3889 /* An instrumentation function which is called for each statement
3890 having memory access we want to instrument. It inserts check
3891 code and bounds copy code.
3892
3893 ITER points to statement to instrument.
3894
3895 NODE holds memory access in statement to check.
3896
3897 LOC holds the location information for statement.
3898
3899 DIRFLAGS determines whether access is read or write.
3900
3901 ACCESS_OFFS should be added to address used in NODE
3902 before check.
3903
3904 ACCESS_SIZE holds size of checked access.
3905
3906 SAFE indicates if NODE access is safe and should not be
3907 checked. */
3908 static void
3909 chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
3910 location_t loc, tree dirflag,
3911 tree access_offs, tree access_size,
3912 bool safe)
3913 {
3914 tree node_type = TREE_TYPE (node);
3915 tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
3916 tree addr_first = NULL_TREE; /* address of the first accessed byte */
3917 tree addr_last = NULL_TREE; /* address of the last accessed byte */
3918 tree ptr = NULL_TREE; /* a pointer used for dereference */
3919 tree bounds = NULL_TREE;
3920
3921 /* We do not need instrumentation for clobbers. */
3922 if (dirflag == integer_one_node
3923 && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
3924 && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
3925 return;
3926
3927 switch (TREE_CODE (node))
3928 {
3929 case ARRAY_REF:
3930 case COMPONENT_REF:
3931 {
3932 bool bitfield;
3933 tree elt;
3934
3935 if (safe)
3936 {
3937 /* We are not going to generate any checks, so do not
3938 generate bounds as well. */
3939 addr_first = chkp_build_addr_expr (node);
3940 break;
3941 }
3942
3943 chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
3944 &bitfield, &bounds, iter, false);
3945
3946 /* Break if there is no dereference and operation is safe. */
3947
3948 if (bitfield)
3949 {
3950 tree field = TREE_OPERAND (node, 1);
3951
3952 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
3953 size = DECL_SIZE_UNIT (field);
3954
3955 if (elt)
3956 elt = chkp_build_addr_expr (elt);
3957 addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
3958 addr_first = fold_build_pointer_plus_loc (loc,
3959 addr_first,
3960 byte_position (field));
3961 }
3962 else
3963 addr_first = chkp_build_addr_expr (node);
3964 }
3965 break;
3966
3967 case INDIRECT_REF:
3968 ptr = TREE_OPERAND (node, 0);
3969 addr_first = ptr;
3970 break;
3971
3972 case MEM_REF:
3973 ptr = TREE_OPERAND (node, 0);
3974 addr_first = chkp_build_addr_expr (node);
3975 break;
3976
3977 case TARGET_MEM_REF:
3978 ptr = TMR_BASE (node);
3979 addr_first = chkp_build_addr_expr (node);
3980 break;
3981
3982 case ARRAY_RANGE_REF:
3983 printf("ARRAY_RANGE_REF\n");
3984 debug_gimple_stmt(gsi_stmt(*iter));
3985 debug_tree(node);
3986 gcc_unreachable ();
3987 break;
3988
3989 case BIT_FIELD_REF:
3990 {
3991 tree offs, rem, bpu;
3992
3993 gcc_assert (!access_offs);
3994 gcc_assert (!access_size);
3995
3996 bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
3997 offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
3998 rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
3999 offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
4000
4001 size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
4002 size = size_binop_loc (loc, PLUS_EXPR, size, rem);
4003 size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
4004 size = fold_convert (size_type_node, size);
4005
4006 chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
4007 dirflag, offs, size, safe);
4008 return;
4009 }
4010 break;
4011
4012 case VAR_DECL:
4013 case RESULT_DECL:
4014 case PARM_DECL:
4015 if (dirflag != integer_one_node
4016 || DECL_REGISTER (node))
4017 return;
4018
4019 safe = true;
4020 addr_first = chkp_build_addr_expr (node);
4021 break;
4022
4023 default:
4024 return;
4025 }
4026
4027 /* If addr_last was not computed then use (addr_first + size - 1)
4028 expression to compute it. */
4029 if (!addr_last)
4030 {
4031 addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
4032 addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
4033 }
4034
4035 /* Shift both first_addr and last_addr by access_offs if specified. */
4036 if (access_offs)
4037 {
4038 addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
4039 addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
4040 }
4041
4042 /* Generate bndcl/bndcu checks if memory access is not safe. */
4043 if (!safe)
4044 {
4045 gimple_stmt_iterator stmt_iter = *iter;
4046
4047 if (!bounds)
4048 bounds = chkp_find_bounds (ptr, iter);
4049
4050 chkp_check_mem_access (addr_first, addr_last, bounds,
4051 stmt_iter, loc, dirflag);
4052 }
4053
4054 /* We need to store bounds in case pointer is stored. */
4055 if (dirflag == integer_one_node
4056 && chkp_type_has_pointer (node_type)
4057 && flag_chkp_store_bounds)
4058 {
4059 gimple *stmt = gsi_stmt (*iter);
4060 tree rhs1 = gimple_assign_rhs1 (stmt);
4061 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4062
4063 if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
4064 chkp_walk_pointer_assignments (node, rhs1, iter,
4065 chkp_copy_bounds_for_elem);
4066 else
4067 {
4068 bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
4069 chkp_build_bndstx (addr_first, rhs1, bounds, iter);
4070 }
4071 }
4072 }
4073
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.  */
void
chkp_copy_bounds_for_assign (gimple *assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.
     The walk above inserted them before ASSIGN, so scan backwards from
     ASSIGN until we get back to it.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple *stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  /* New edges inherit the count/frequency of the edge being
	     inlined.  */
	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
4112
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple *stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Statements after a block-ending statement must be moved
	   onto the fallthru edge.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move every trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple *next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
4170
4171 /* Walker callback for chkp_replace_function_pointers. Replaces
4172 function pointer in the specified operand with pointer to the
4173 instrumented function version. */
4174 static tree
4175 chkp_replace_function_pointer (tree *op, int *walk_subtrees,
4176 void *data ATTRIBUTE_UNUSED)
4177 {
4178 if (TREE_CODE (*op) == FUNCTION_DECL
4179 && chkp_instrumentable_p (*op)
4180 && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
4181 /* For builtins we replace pointers only for selected
4182 function and functions having definitions. */
4183 || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
4184 && (chkp_instrument_normal_builtin (*op)
4185 || gimple_has_body_p (*op)))))
4186 {
4187 struct cgraph_node *node = cgraph_node::get_create (*op);
4188 struct cgraph_node *clone = NULL;
4189
4190 if (!node->instrumentation_clone)
4191 clone = chkp_maybe_create_clone (*op);
4192
4193 if (clone)
4194 *op = clone->decl;
4195 *walk_subtrees = 0;
4196 }
4197
4198 return NULL;
4199 }
4200
4201 /* This function searches for function pointers in statement
4202 pointed by GSI and replaces them with pointers to instrumented
4203 function versions. */
4204 static void
4205 chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
4206 {
4207 gimple *stmt = gsi_stmt (*gsi);
4208 /* For calls we want to walk call args only. */
4209 if (gimple_code (stmt) == GIMPLE_CALL)
4210 {
4211 unsigned i;
4212 for (i = 0; i < gimple_call_num_args (stmt); i++)
4213 walk_tree (gimple_call_arg_ptr (stmt, i),
4214 chkp_replace_function_pointer, NULL, NULL);
4215 }
4216 else
4217 walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
4218 }
4219
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* In checker static constructors all memory accesses are known
     to be valid, so checks may be omitted for them.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor before instrumenting BB: added checks
	 may split the block and change bb->next_bb.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple *s = gsi_stmt (i);

	  /* Skip statement marked to not be instrumented.  */
	  if (chkp_marked_stmt_p (s))
	    {
	      gsi_next (&i);
	      continue;
	    }

	  chkp_replace_function_pointers (&i);

	  switch (gimple_code (s))
	    {
	    case GIMPLE_ASSIGN:
	      /* The LHS is instrumented as a store (integer_one_node
		 dirflag), RHS operands as loads (integer_zero_node).  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
	      break;

	    case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

	    default:
	      ;
	    }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
	}
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      /* A bounded pointer param: store its incoming bounds
		 at the param's address in the entry block.  */
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      /* An aggregate param containing pointers: store bounds
		 for each pointer slot found in its type.  */
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  /* Each slot consumes one bounds param following ARG.  */
		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
4355
4356 /* Find init/null/copy_ptr_bounds calls and replace them
4357 with assignments. It should allow better code
4358 optimization. */
4359
4360 static void
4361 chkp_remove_useless_builtins ()
4362 {
4363 basic_block bb;
4364 gimple_stmt_iterator gsi;
4365
4366 FOR_EACH_BB_FN (bb, cfun)
4367 {
4368 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4369 {
4370 gimple *stmt = gsi_stmt (gsi);
4371 tree fndecl;
4372 enum built_in_function fcode;
4373
4374 /* Find builtins returning first arg and replace
4375 them with assignments. */
4376 if (gimple_code (stmt) == GIMPLE_CALL
4377 && (fndecl = gimple_call_fndecl (stmt))
4378 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4379 && (fcode = DECL_FUNCTION_CODE (fndecl))
4380 && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
4381 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
4382 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
4383 || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
4384 {
4385 tree res = gimple_call_arg (stmt, 0);
4386 update_call_from_tree (&gsi, res);
4387 stmt = gsi_stmt (gsi);
4388 update_stmt (stmt);
4389 }
4390 }
4391 }
4392 }
4393
/* Initialize pass.  Resets per-function instrumentation state;
   releases resources are paired with chkp_fini.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear the "do not instrument" mark on every statement left over
     from a previous run.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* chkp_invalid_bounds, chkp_completed_bounds_set,
     chkp_reg_addr_bounds and chkp_incomplete_bounds_map are created
     here and deleted in chkp_fini; chkp_reg_bounds, chkp_bound_vars
     and chkp_bounds_map instead survive until the next chkp_init,
     which deletes the previous instance before re-creating it.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
4439
/* Finalize instrumentation pass.  Releases the per-function state
   created by chkp_init.  Note that chkp_reg_bounds, chkp_bound_vars
   and chkp_bounds_map are intentionally left alive here: chkp_init
   deletes and re-creates them on the next run.  */
static void
chkp_fini (void)
{
  in_chkp_pass = false;

  delete chkp_invalid_bounds;
  delete chkp_completed_bounds_set;
  delete chkp_reg_addr_bounds;
  delete chkp_incomplete_bounds_map;

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_release (NULL);

  /* Clear cached per-function trees so the next run starts fresh.  */
  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
}
4460
/* Main instrumentation pass function.  Always returns 0 (no extra
   TODO flags beyond those declared in pass_data_chkp).  */
static unsigned int
chkp_execute (void)
{
  /* Reset per-function state and create the shared constant
     bounds variables.  */
  chkp_init ();

  /* Instrument memory accesses, calls and returns.  */
  chkp_instrument_function ();

  /* Turn chkp builtins that merely return their first argument
     into plain assignments.  */
  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  /* Release per-function state.  */
  chkp_fini ();

  return 0;
}
4479
4480 /* Instrumentation pass gate. */
4481 static bool
4482 chkp_gate (void)
4483 {
4484 cgraph_node *node = cgraph_node::get (cfun->decl);
4485 return ((node != NULL
4486 && node->instrumentation_clone)
4487 || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl)));
4488 }
4489
namespace {

/* Pass descriptor for the pointer bounds checker instrumentation
   pass.  Requires SSA and CFG; updates SSA and verifies the IL
   after it runs.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* Pass object wrapping the chkp gate and execute functions.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }

}; // class pass_chkp

} // anon namespace
4532
/* Create an instance of the chkp pass for the pass manager.
   Caller (the pass manager) owns the returned object.  */
gimple_opt_pass *
make_pass_chkp (gcc::context *ctxt)
{
  return new pass_chkp (ctxt);
}
4538
4539 #include "gt-tree-chkp.h"