1 /* Consolidation of svalues and regions.
2 Copyright (C) 2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 #include "diagnostic-core.h"
26 #include "gimple-pretty-print.h"
28 #include "basic-block.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
36 #include "stringpool.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
46 #include "analyzer/analyzer.h"
47 #include "analyzer/analyzer-logging.h"
48 #include "ordered-hash-map.h"
53 #include "analyzer/supergraph.h"
55 #include "analyzer/call-string.h"
56 #include "analyzer/program-point.h"
57 #include "analyzer/store.h"
58 #include "analyzer/region-model.h"
64 /* class region_model_manager. */
66 /* region_model_manager's ctor. */
68 region_model_manager::region_model_manager ()
69 : m_next_region_id (0),
70 m_root_region (alloc_region_id ()),
71 m_stack_region (alloc_region_id (), &m_root_region
),
72 m_heap_region (alloc_region_id (), &m_root_region
),
73 m_unknown_NULL (NULL
),
74 m_max_complexity (0, 0),
75 m_code_region (alloc_region_id (), &m_root_region
),
76 m_fndecls_map (), m_labels_map (),
77 m_globals_region (alloc_region_id (), &m_root_region
),
83 /* region_model_manager's dtor. Delete all of the managed svalues
86 region_model_manager::~region_model_manager ()
88 /* Delete consolidated svalues. */
89 for (constants_map_t::iterator iter
= m_constants_map
.begin ();
90 iter
!= m_constants_map
.end (); ++iter
)
91 delete (*iter
).second
;
92 for (unknowns_map_t::iterator iter
= m_unknowns_map
.begin ();
93 iter
!= m_unknowns_map
.end (); ++iter
)
94 delete (*iter
).second
;
95 delete m_unknown_NULL
;
96 for (setjmp_values_map_t::iterator iter
= m_setjmp_values_map
.begin ();
97 iter
!= m_setjmp_values_map
.end (); ++iter
)
98 delete (*iter
).second
;
99 for (poisoned_values_map_t::iterator iter
= m_poisoned_values_map
.begin ();
100 iter
!= m_poisoned_values_map
.end (); ++iter
)
101 delete (*iter
).second
;
102 for (initial_values_map_t::iterator iter
= m_initial_values_map
.begin ();
103 iter
!= m_initial_values_map
.end (); ++iter
)
104 delete (*iter
).second
;
105 for (pointer_values_map_t::iterator iter
= m_pointer_values_map
.begin ();
106 iter
!= m_pointer_values_map
.end (); ++iter
)
107 delete (*iter
).second
;
108 for (unaryop_values_map_t::iterator iter
= m_unaryop_values_map
.begin ();
109 iter
!= m_unaryop_values_map
.end (); ++iter
)
110 delete (*iter
).second
;
111 for (binop_values_map_t::iterator iter
= m_binop_values_map
.begin ();
112 iter
!= m_binop_values_map
.end (); ++iter
)
113 delete (*iter
).second
;
114 for (sub_values_map_t::iterator iter
= m_sub_values_map
.begin ();
115 iter
!= m_sub_values_map
.end (); ++iter
)
116 delete (*iter
).second
;
117 for (unmergeable_values_map_t::iterator iter
118 = m_unmergeable_values_map
.begin ();
119 iter
!= m_unmergeable_values_map
.end (); ++iter
)
120 delete (*iter
).second
;
121 for (widening_values_map_t::iterator iter
= m_widening_values_map
.begin ();
122 iter
!= m_widening_values_map
.end (); ++iter
)
123 delete (*iter
).second
;
124 for (compound_values_map_t::iterator iter
= m_compound_values_map
.begin ();
125 iter
!= m_compound_values_map
.end (); ++iter
)
126 delete (*iter
).second
;
127 for (conjured_values_map_t::iterator iter
= m_conjured_values_map
.begin ();
128 iter
!= m_conjured_values_map
.end (); ++iter
)
129 delete (*iter
).second
;
131 /* Delete consolidated regions. */
132 for (fndecls_map_t::iterator iter
= m_fndecls_map
.begin ();
133 iter
!= m_fndecls_map
.end (); ++iter
)
134 delete (*iter
).second
;
135 for (labels_map_t::iterator iter
= m_labels_map
.begin ();
136 iter
!= m_labels_map
.end (); ++iter
)
137 delete (*iter
).second
;
138 for (globals_map_t::iterator iter
= m_globals_map
.begin ();
139 iter
!= m_globals_map
.end (); ++iter
)
140 delete (*iter
).second
;
141 for (string_map_t::iterator iter
= m_string_map
.begin ();
142 iter
!= m_string_map
.end (); ++iter
)
143 delete (*iter
).second
;
146 /* Return true if C exceeds the complexity limit for svalues. */
149 region_model_manager::too_complex_p (const complexity
&c
) const
151 if (c
.m_max_depth
> (unsigned)param_analyzer_max_svalue_depth
)
156 /* If SVAL exceeds the complexity limit for svalues, delete it
158 Otherwise update m_max_complexity and return false. */
161 region_model_manager::reject_if_too_complex (svalue
*sval
)
163 const complexity
&c
= sval
->get_complexity ();
164 if (!too_complex_p (c
))
166 if (m_max_complexity
.m_num_nodes
< c
.m_num_nodes
)
167 m_max_complexity
.m_num_nodes
= c
.m_num_nodes
;
168 if (m_max_complexity
.m_max_depth
< c
.m_max_depth
)
169 m_max_complexity
.m_max_depth
= c
.m_max_depth
;
/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.
   The type is captured *before* reject_if_too_complex, since rejection
   deletes the svalue.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)			\
  do {								\
    svalue *sval_ = (SVAL);					\
    tree type_ = sval_->get_type ();				\
    if (reject_if_too_complex (sval_))				\
      return get_or_create_unknown_svalue (type_);		\
  } while (0)
192 /* svalue consolidation. */
194 /* Return the svalue * for a constant_svalue for CST_EXPR,
195 creating it if necessary.
196 The constant_svalue instances are reused, based on pointer equality
200 region_model_manager::get_or_create_constant_svalue (tree cst_expr
)
202 gcc_assert (cst_expr
);
204 constant_svalue
**slot
= m_constants_map
.get (cst_expr
);
207 constant_svalue
*cst_sval
= new constant_svalue (cst_expr
);
208 RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval
);
209 m_constants_map
.put (cst_expr
, cst_sval
);
213 /* Return the svalue * for a unknown_svalue for TYPE (which can be NULL),
214 creating it if necessary.
215 The unknown_svalue instances are reused, based on pointer equality
219 region_model_manager::get_or_create_unknown_svalue (tree type
)
221 /* Special-case NULL, so that the hash_map can use NULL as the
223 if (type
== NULL_TREE
)
226 m_unknown_NULL
= new unknown_svalue (type
);
227 return m_unknown_NULL
;
230 unknown_svalue
**slot
= m_unknowns_map
.get (type
);
233 unknown_svalue
*sval
= new unknown_svalue (type
);
234 m_unknowns_map
.put (type
, sval
);
238 /* Return the svalue * for the initial value of REG, creating it if
242 region_model_manager::get_or_create_initial_value (const region
*reg
)
244 /* The initial value of a cast is a cast of the initial value. */
245 if (const cast_region
*cast_reg
= reg
->dyn_cast_cast_region ())
247 const region
*original_reg
= cast_reg
->get_original_region ();
248 return get_or_create_cast (cast_reg
->get_type (),
249 get_or_create_initial_value (original_reg
));
252 if (initial_svalue
**slot
= m_initial_values_map
.get (reg
))
254 initial_svalue
*initial_sval
= new initial_svalue (reg
->get_type (), reg
);
255 RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval
);
256 m_initial_values_map
.put (reg
, initial_sval
);
260 /* Return the svalue * for R using type TYPE, creating it if
264 region_model_manager::get_or_create_setjmp_svalue (const setjmp_record
&r
,
267 setjmp_svalue::key_t
key (r
, type
);
268 if (setjmp_svalue
**slot
= m_setjmp_values_map
.get (key
))
270 setjmp_svalue
*setjmp_sval
= new setjmp_svalue (r
, type
);
271 RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval
);
272 m_setjmp_values_map
.put (key
, setjmp_sval
);
276 /* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
280 region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind
,
283 poisoned_svalue::key_t
key (kind
, type
);
284 if (poisoned_svalue
**slot
= m_poisoned_values_map
.get (key
))
286 poisoned_svalue
*poisoned_sval
= new poisoned_svalue (kind
, type
);
287 RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval
);
288 m_poisoned_values_map
.put (key
, poisoned_sval
);
289 return poisoned_sval
;
292 /* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
293 creating it if necessary. */
296 region_model_manager::get_ptr_svalue (tree ptr_type
, const region
*pointee
)
298 /* If this is a symbolic region from dereferencing a pointer, and the types
299 match, then return the original pointer. */
300 if (const symbolic_region
*sym_reg
= pointee
->dyn_cast_symbolic_region ())
301 if (ptr_type
== sym_reg
->get_pointer ()->get_type ())
302 return sym_reg
->get_pointer ();
304 region_svalue::key_t
key (ptr_type
, pointee
);
305 if (region_svalue
**slot
= m_pointer_values_map
.get (key
))
307 region_svalue
*sval
= new region_svalue (ptr_type
, pointee
);
308 RETURN_UNKNOWN_IF_TOO_COMPLEX (sval
);
309 m_pointer_values_map
.put (key
, sval
);
313 /* Subroutine of region_model_manager::get_or_create_unaryop.
314 Attempt to fold the inputs and return a simpler svalue *.
315 Otherwise, return NULL. */
318 region_model_manager::maybe_fold_unaryop (tree type
, enum tree_code op
,
321 /* Ops on "unknown" are also unknown. */
322 if (arg
->get_kind () == SK_UNKNOWN
)
323 return get_or_create_unknown_svalue (type
);
328 case VIEW_CONVERT_EXPR
:
331 /* Handle redundant casts. */
333 && useless_type_conversion_p (arg
->get_type (), type
))
336 /* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))
337 => "cast<TYPE> (innermost_arg)",
338 unless INNER_TYPE is narrower than TYPE. */
339 if (const svalue
*innermost_arg
= arg
->maybe_undo_cast ())
341 tree inner_type
= arg
->get_type ();
343 && TYPE_SIZE (inner_type
)
344 && (fold_binary (LE_EXPR
, boolean_type_node
,
345 TYPE_SIZE (type
), TYPE_SIZE (inner_type
))
346 == boolean_true_node
))
347 return maybe_fold_unaryop (type
, op
, innermost_arg
);
353 /* Invert comparisons e.g. "!(x == y)" => "x != y". */
354 if (const binop_svalue
*binop
= arg
->dyn_cast_binop_svalue ())
355 if (TREE_CODE_CLASS (binop
->get_op ()) == tcc_comparison
)
357 enum tree_code inv_op
358 = invert_tree_comparison (binop
->get_op (),
359 HONOR_NANS (binop
->get_type ()));
360 if (inv_op
!= ERROR_MARK
)
361 return get_or_create_binop (binop
->get_type (), inv_op
,
370 if (tree cst
= arg
->maybe_get_constant ())
371 if (tree result
= fold_unary (op
, type
, cst
))
372 return get_or_create_constant_svalue (result
);
377 /* Return the svalue * for an unary operation OP on ARG with a result of
378 type TYPE, creating it if necessary. */
381 region_model_manager::get_or_create_unaryop (tree type
, enum tree_code op
,
384 if (const svalue
*folded
= maybe_fold_unaryop (type
, op
, arg
))
386 unaryop_svalue::key_t
key (type
, op
, arg
);
387 if (unaryop_svalue
**slot
= m_unaryop_values_map
.get (key
))
389 unaryop_svalue
*unaryop_sval
= new unaryop_svalue (type
, op
, arg
);
390 RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval
);
391 m_unaryop_values_map
.put (key
, unaryop_sval
);
395 /* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
396 Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
397 of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
398 and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
401 static enum tree_code
402 get_code_for_cast (tree dst_type
, tree src_type
)
404 gcc_assert (dst_type
);
408 if (TREE_CODE (src_type
) == REAL_TYPE
)
410 if (TREE_CODE (dst_type
) == INTEGER_TYPE
)
411 return FIX_TRUNC_EXPR
;
413 return VIEW_CONVERT_EXPR
;
419 /* Return the svalue * for a cast of ARG to type TYPE, creating it
423 region_model_manager::get_or_create_cast (tree type
, const svalue
*arg
)
426 enum tree_code op
= get_code_for_cast (type
, arg
->get_type ());
427 return get_or_create_unaryop (type
, op
, arg
);
430 /* Subroutine of region_model_manager::get_or_create_binop.
431 Attempt to fold the inputs and return a simpler svalue *.
432 Otherwise, return NULL. */
435 region_model_manager::maybe_fold_binop (tree type
, enum tree_code op
,
439 tree cst0
= arg0
->maybe_get_constant ();
440 tree cst1
= arg1
->maybe_get_constant ();
444 if (tree result
= fold_binary (op
, type
, cst0
, cst1
))
445 if (CONSTANT_CLASS_P (result
))
446 return get_or_create_constant_svalue (result
);
449 if (FLOAT_TYPE_P (type
)
450 || (arg0
->get_type () && FLOAT_TYPE_P (arg0
->get_type ()))
451 || (arg1
->get_type () && FLOAT_TYPE_P (arg1
->get_type ())))
458 case POINTER_PLUS_EXPR
:
460 /* (VAL + 0) -> VAL. */
461 if (cst1
&& zerop (cst1
) && type
== arg0
->get_type ())
465 /* (VAL - 0) -> VAL. */
466 if (cst1
&& zerop (cst1
) && type
== arg0
->get_type ())
471 if (cst1
&& zerop (cst1
) && INTEGRAL_TYPE_P (type
))
472 return get_or_create_constant_svalue (build_int_cst (type
, 0));
473 /* (VAL * 1) -> VAL. */
474 if (cst1
&& integer_onep (cst1
))
479 if (zerop (cst1
) && INTEGRAL_TYPE_P (type
))
480 /* "(ARG0 & 0)" -> "0". */
481 return get_or_create_constant_svalue (build_int_cst (type
, 0));
483 case TRUTH_ANDIF_EXPR
:
487 if (zerop (cst1
) && INTEGRAL_TYPE_P (type
))
488 /* "(ARG0 && 0)" -> "0". */
489 return get_or_create_constant_svalue (build_int_cst (type
, 0));
491 /* "(ARG0 && nonzero-cst)" -> "ARG0". */
492 return get_or_create_cast (type
, arg0
);
495 case TRUTH_ORIF_EXPR
:
500 /* "(ARG0 || 0)" -> "ARG0". */
501 return get_or_create_cast (type
, arg0
);
503 /* "(ARG0 && nonzero-cst)" -> "nonzero-cst". */
504 return get_or_create_cast (type
, arg1
);
509 /* For associative ops, fold "(X op CST_A) op CST_B)" to
510 "X op (CST_A op CST_B)". */
511 if (cst1
&& associative_tree_code (op
))
512 if (const binop_svalue
*binop
= arg0
->dyn_cast_binop_svalue ())
513 if (binop
->get_op () == op
514 && binop
->get_arg1 ()->maybe_get_constant ()
515 && type
== binop
->get_type ()
516 && type
== binop
->get_arg0 ()->get_type ()
517 && type
== binop
->get_arg1 ()->get_type ())
518 return get_or_create_binop
519 (type
, op
, binop
->get_arg0 (),
520 get_or_create_binop (type
, op
,
521 binop
->get_arg1 (), arg1
));
523 /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
525 "(PTR ptr+ CST_A) ptr+ CST_B)" to "PTR ptr+ (CST_A ptr+ CST_B)"
526 e.g. in data-model-1.c: test_4c. */
527 if (cst1
&& op
== POINTER_PLUS_EXPR
)
528 if (const binop_svalue
*binop
= arg0
->dyn_cast_binop_svalue ())
529 if (binop
->get_op () == POINTER_PLUS_EXPR
)
530 if (binop
->get_arg1 ()->maybe_get_constant ())
531 return get_or_create_binop
532 (type
, op
, binop
->get_arg0 (),
533 get_or_create_binop (size_type_node
, op
,
534 binop
->get_arg1 (), arg1
));
536 /* Ops on "unknown" are also unknown (unless we can use one of the
537 identities above). */
538 if (arg0
->get_kind () == SK_UNKNOWN
539 || arg1
->get_kind () == SK_UNKNOWN
)
540 return get_or_create_unknown_svalue (type
);
547 /* Return the svalue * for an binary operation OP on ARG0 and ARG1
548 with a result of type TYPE, creating it if necessary. */
551 region_model_manager::get_or_create_binop (tree type
, enum tree_code op
,
555 /* For commutative ops, put any constant on the RHS. */
556 if (arg0
->maybe_get_constant () && commutative_tree_code (op
))
557 std::swap (arg0
, arg1
);
559 if (const svalue
*folded
= maybe_fold_binop (type
, op
, arg0
, arg1
))
562 binop_svalue::key_t
key (type
, op
, arg0
, arg1
);
563 if (binop_svalue
**slot
= m_binop_values_map
.get (key
))
565 binop_svalue
*binop_sval
= new binop_svalue (type
, op
, arg0
, arg1
);
566 RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval
);
567 m_binop_values_map
.put (key
, binop_sval
);
571 /* Subroutine of region_model_manager::get_or_create_sub_svalue.
572 Return a folded svalue, or NULL. */
575 region_model_manager::maybe_fold_sub_svalue (tree type
,
576 const svalue
*parent_svalue
,
577 const region
*subregion
)
579 /* Subvalues of "unknown" are unknown. */
580 if (parent_svalue
->get_kind () == SK_UNKNOWN
)
581 return get_or_create_unknown_svalue (type
);
583 /* If we have a subregion of a zero-fill, it's zero. */
584 if (const unaryop_svalue
*unary
585 = parent_svalue
->dyn_cast_unaryop_svalue ())
587 if (unary
->get_op () == NOP_EXPR
588 || unary
->get_op () == VIEW_CONVERT_EXPR
)
589 if (tree cst
= unary
->get_arg ()->maybe_get_constant ())
592 const svalue
*cst_sval
593 = get_or_create_constant_svalue (cst
);
594 return get_or_create_cast (type
, cst_sval
);
598 /* Handle getting individual chars from a STRING_CST. */
599 if (tree cst
= parent_svalue
->maybe_get_constant ())
600 if (TREE_CODE (cst
) == STRING_CST
)
601 if (const element_region
*element_reg
602 = subregion
->dyn_cast_element_region ())
604 const svalue
*idx_sval
= element_reg
->get_index ();
605 if (tree cst_idx
= idx_sval
->maybe_get_constant ())
606 if (const svalue
*char_sval
607 = maybe_get_char_from_string_cst (cst
, cst_idx
))
608 return get_or_create_cast (type
, char_sval
);
611 /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
613 Subvalue(InitialValue(R1), FieldRegion(R2, F))
614 -> InitialValue(FieldRegion(R1, F)). */
615 if (const initial_svalue
*init_sval
616 = parent_svalue
->dyn_cast_initial_svalue ())
618 if (const field_region
*field_reg
= subregion
->dyn_cast_field_region ())
620 const region
*field_reg_new
621 = get_field_region (init_sval
->get_region (),
622 field_reg
->get_field ());
623 return get_or_create_initial_value (field_reg_new
);
630 /* Return the svalue * for extracting a subvalue of type TYPE from
631 PARENT_SVALUE based on SUBREGION, creating it if necessary. */
634 region_model_manager::get_or_create_sub_svalue (tree type
,
635 const svalue
*parent_svalue
,
636 const region
*subregion
)
638 if (const svalue
*folded
639 = maybe_fold_sub_svalue (type
, parent_svalue
, subregion
))
642 sub_svalue::key_t
key (type
, parent_svalue
, subregion
);
643 if (sub_svalue
**slot
= m_sub_values_map
.get (key
))
646 = new sub_svalue (type
, parent_svalue
, subregion
);
647 RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval
);
648 m_sub_values_map
.put (key
, sub_sval
);
652 /* Return the svalue * that decorates ARG as being unmergeable,
653 creating it if necessary. */
656 region_model_manager::get_or_create_unmergeable (const svalue
*arg
)
658 if (arg
->get_kind () == SK_UNMERGEABLE
)
661 if (unmergeable_svalue
**slot
= m_unmergeable_values_map
.get (arg
))
663 unmergeable_svalue
*unmergeable_sval
= new unmergeable_svalue (arg
);
664 RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval
);
665 m_unmergeable_values_map
.put (arg
, unmergeable_sval
);
666 return unmergeable_sval
;
669 /* Return the svalue * of type TYPE for the merger of value BASE_SVAL
670 and ITER_SVAL at POINT, creating it if necessary. */
673 region_model_manager::get_or_create_widening_svalue (tree type
,
674 const program_point
&point
,
675 const svalue
*base_sval
,
676 const svalue
*iter_sval
)
678 gcc_assert (base_sval
->get_kind () != SK_WIDENING
);
679 gcc_assert (iter_sval
->get_kind () != SK_WIDENING
);
680 widening_svalue::key_t
key (type
, point
, base_sval
, iter_sval
);
681 if (widening_svalue
**slot
= m_widening_values_map
.get (key
))
683 widening_svalue
*widening_sval
684 = new widening_svalue (type
, point
, base_sval
, iter_sval
);
685 RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval
);
686 m_widening_values_map
.put (key
, widening_sval
);
687 return widening_sval
;
690 /* Return the svalue * of type TYPE for the compound values in MAP,
691 creating it if necessary. */
694 region_model_manager::get_or_create_compound_svalue (tree type
,
695 const binding_map
&map
)
697 compound_svalue::key_t
tmp_key (type
, &map
);
698 if (compound_svalue
**slot
= m_compound_values_map
.get (tmp_key
))
700 compound_svalue
*compound_sval
701 = new compound_svalue (type
, map
);
702 RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval
);
703 /* Use make_key rather than reusing the key, so that we use a
704 ptr to compound_sval's binding_map, rather than the MAP param. */
705 m_compound_values_map
.put (compound_sval
->make_key (), compound_sval
);
706 return compound_sval
;
709 /* Return the svalue * of type TYPE for the value conjured for ID_REG
710 at STMT, creating it if necessary. */
713 region_model_manager::get_or_create_conjured_svalue (tree type
,
715 const region
*id_reg
)
717 conjured_svalue::key_t
key (type
, stmt
, id_reg
);
718 if (conjured_svalue
**slot
= m_conjured_values_map
.get (key
))
720 conjured_svalue
*conjured_sval
721 = new conjured_svalue (type
, stmt
, id_reg
);
722 RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval
);
723 m_conjured_values_map
.put (key
, conjured_sval
);
724 return conjured_sval
;
727 /* Given STRING_CST, a STRING_CST and BYTE_OFFSET_CST a constant,
728 attempt to get the character at that offset, returning either
729 the svalue for the character constant, or NULL if unsuccessful. */
732 region_model_manager::maybe_get_char_from_string_cst (tree string_cst
,
733 tree byte_offset_cst
)
735 gcc_assert (TREE_CODE (string_cst
) == STRING_CST
);
737 /* Adapted from fold_read_from_constant_string. */
738 scalar_int_mode char_mode
;
739 if (TREE_CODE (byte_offset_cst
) == INTEGER_CST
740 && compare_tree_int (byte_offset_cst
,
741 TREE_STRING_LENGTH (string_cst
)) < 0
742 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst
))),
744 && GET_MODE_SIZE (char_mode
) == 1)
747 = build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst
)),
748 (TREE_STRING_POINTER (string_cst
)
749 [TREE_INT_CST_LOW (byte_offset_cst
)]));
750 return get_or_create_constant_svalue (char_cst
);
755 /* region consolidation. */
757 /* Return the region for FNDECL, creating it if necessary. */
759 const function_region
*
760 region_model_manager::get_region_for_fndecl (tree fndecl
)
762 gcc_assert (TREE_CODE (fndecl
) == FUNCTION_DECL
);
764 function_region
**slot
= m_fndecls_map
.get (fndecl
);
768 = new function_region (alloc_region_id (), &m_code_region
, fndecl
);
769 m_fndecls_map
.put (fndecl
, reg
);
773 /* Return the region for LABEL, creating it if necessary. */
776 region_model_manager::get_region_for_label (tree label
)
778 gcc_assert (TREE_CODE (label
) == LABEL_DECL
);
780 label_region
**slot
= m_labels_map
.get (label
);
784 tree fndecl
= DECL_CONTEXT (label
);
785 gcc_assert (fndecl
&& TREE_CODE (fndecl
) == FUNCTION_DECL
);
787 const function_region
*func_reg
= get_region_for_fndecl (fndecl
);
789 = new label_region (alloc_region_id (), func_reg
, label
);
790 m_labels_map
.put (label
, reg
);
794 /* Return the region for EXPR, creating it if necessary. */
797 region_model_manager::get_region_for_global (tree expr
)
799 gcc_assert (TREE_CODE (expr
) == VAR_DECL
);
801 decl_region
**slot
= m_globals_map
.get (expr
);
805 = new decl_region (alloc_region_id (), &m_globals_region
, expr
);
806 m_globals_map
.put (expr
, reg
);
810 /* Return the region that describes accessing field FIELD of PARENT,
811 creating it if necessary. */
814 region_model_manager::get_field_region (const region
*parent
, tree field
)
816 gcc_assert (TREE_CODE (field
) == FIELD_DECL
);
818 field_region::key_t
key (parent
, field
);
819 if (field_region
*reg
= m_field_regions
.get (key
))
822 field_region
*field_reg
823 = new field_region (alloc_region_id (), parent
, field
);
824 m_field_regions
.put (key
, field_reg
);
828 /* Return the region that describes accessing the element of type
829 ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary. */
832 region_model_manager::get_element_region (const region
*parent
,
836 element_region::key_t
key (parent
, element_type
, index
);
837 if (element_region
*reg
= m_element_regions
.get (key
))
840 element_region
*element_reg
841 = new element_region (alloc_region_id (), parent
, element_type
, index
);
842 m_element_regions
.put (key
, element_reg
);
846 /* Return the region that describes accessing the subregion of type
847 ELEMENT_TYPE at offset BYTE_OFFSET within PARENT, creating it if
851 region_model_manager::get_offset_region (const region
*parent
,
853 const svalue
*byte_offset
)
855 /* If BYTE_OFFSET is zero, return PARENT. */
856 if (tree cst_offset
= byte_offset
->maybe_get_constant ())
857 if (zerop (cst_offset
))
858 return get_cast_region (parent
, type
);
860 /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
861 to OFFSET_REGION(REG, (X + Y)). */
862 if (const offset_region
*parent_offset_reg
863 = parent
->dyn_cast_offset_region ())
865 const svalue
*sval_x
= parent_offset_reg
->get_byte_offset ();
866 const svalue
*sval_sum
867 = get_or_create_binop (byte_offset
->get_type (),
868 PLUS_EXPR
, sval_x
, byte_offset
);
869 return get_offset_region (parent
->get_parent_region (), type
, sval_sum
);
872 offset_region::key_t
key (parent
, type
, byte_offset
);
873 if (offset_region
*reg
= m_offset_regions
.get (key
))
876 offset_region
*offset_reg
877 = new offset_region (alloc_region_id (), parent
, type
, byte_offset
);
878 m_offset_regions
.put (key
, offset_reg
);
882 /* Return the region that describes accessing PARENT_REGION as if
883 it were of type TYPE, creating it if necessary. */
886 region_model_manager::get_cast_region (const region
*original_region
,
889 /* If types match, return ORIGINAL_REGION. */
890 if (type
== original_region
->get_type ())
891 return original_region
;
893 cast_region::key_t
key (original_region
, type
);
894 if (cast_region
*reg
= m_cast_regions
.get (key
))
897 cast_region
*cast_reg
898 = new cast_region (alloc_region_id (), original_region
, type
);
899 m_cast_regions
.put (key
, cast_reg
);
903 /* Return the frame_region for call to FUN from CALLING_FRAME, creating it
904 if necessary. CALLING_FRAME may be NULL. */
907 region_model_manager::get_frame_region (const frame_region
*calling_frame
,
910 int index
= calling_frame
? calling_frame
->get_index () + 1 : 0;
912 frame_region::key_t
key (calling_frame
, fun
);
913 if (frame_region
*reg
= m_frame_regions
.get (key
))
916 frame_region
*frame_reg
917 = new frame_region (alloc_region_id (), &m_stack_region
, calling_frame
,
919 m_frame_regions
.put (key
, frame_reg
);
923 /* Return the region that describes dereferencing SVAL, creating it
927 region_model_manager::get_symbolic_region (const svalue
*sval
)
929 symbolic_region::key_t
key (&m_root_region
, sval
);
930 if (symbolic_region
*reg
= m_symbolic_regions
.get (key
))
933 symbolic_region
*symbolic_reg
934 = new symbolic_region (alloc_region_id (), &m_root_region
, sval
);
935 m_symbolic_regions
.put (key
, symbolic_reg
);
939 /* Return the region that describes accessing STRING_CST, creating it
942 const string_region
*
943 region_model_manager::get_region_for_string (tree string_cst
)
945 gcc_assert (TREE_CODE (string_cst
) == STRING_CST
);
947 string_region
**slot
= m_string_map
.get (string_cst
);
951 = new string_region (alloc_region_id (), &m_root_region
, string_cst
);
952 m_string_map
.put (string_cst
, reg
);
956 /* If we see a tree code we don't know how to handle, rather than
957 ICE or generate bogus results, create a dummy region, and notify
958 CTXT so that it can mark the new state as being not properly
959 modelled. The exploded graph can then stop exploring that path,
960 since any diagnostics we might issue will have questionable
964 region_model_manager::
965 get_region_for_unexpected_tree_code (region_model_context
*ctxt
,
967 const dump_location_t
&loc
)
969 tree type
= TYPE_P (t
) ? t
: TREE_TYPE (t
);
971 = new unknown_region (alloc_region_id (), &m_root_region
, type
);
973 ctxt
->on_unexpected_tree_code (t
, loc
);
977 /* Return a new region describing a heap-allocated block of memory. */
980 region_model_manager::create_region_for_heap_alloc ()
983 = new heap_allocated_region (alloc_region_id (), &m_heap_region
);
984 m_managed_dynamic_regions
.safe_push (reg
);
988 /* Return a new region describing a block of memory allocated within FRAME. */
991 region_model_manager::create_region_for_alloca (const frame_region
*frame
)
994 region
*reg
= new alloca_region (alloc_region_id (), frame
);
995 m_managed_dynamic_regions
.safe_push (reg
);
999 /* Log OBJ to LOGGER. */
1001 template <typename T
>
1003 log_managed_object (logger
*logger
, const T
*obj
)
1005 logger
->start_log_line ();
1006 pretty_printer
*pp
= logger
->get_printer ();
1007 pp_string (pp
, " ");
1008 obj
->dump_to_pp (pp
, true);
1009 logger
->end_log_line ();
1012 /* Specialization for frame_region, which also logs the count of locals
1013 managed by the frame_region. */
1017 log_managed_object (logger
*logger
, const frame_region
*obj
)
1019 logger
->start_log_line ();
1020 pretty_printer
*pp
= logger
->get_printer ();
1021 pp_string (pp
, " ");
1022 obj
->dump_to_pp (pp
, true);
1023 pp_printf (pp
, " [with %i region(s) for locals]", obj
->get_num_locals ());
1024 logger
->end_log_line ();
1027 /* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
1028 If SHOW_OBJS is true, also dump the objects themselves. */
1030 template <typename K
, typename T
>
1032 log_uniq_map (logger
*logger
, bool show_objs
, const char *title
,
1033 const hash_map
<K
, T
*> &uniq_map
)
1035 logger
->log (" # %s: %li", title
, uniq_map
.elements ());
1037 for (typename hash_map
<K
, T
*>::iterator iter
= uniq_map
.begin ();
1038 iter
!= uniq_map
.end (); ++iter
)
1040 T
*managed_obj
= (*iter
).second
;
1041 log_managed_object
<T
> (logger
, managed_obj
);
1045 /* Dump the number of objects that were managed by MAP to LOGGER.
1046 If SHOW_OBJS is true, also dump the objects themselves. */
1048 template <typename T
>
1050 log_uniq_map (logger
*logger
, bool show_objs
, const char *title
,
1051 const consolidation_map
<T
> &map
)
1053 logger
->log (" # %s: %li", title
, map
.elements ());
1055 for (typename consolidation_map
<T
>::iterator iter
= map
.begin ();
1056 iter
!= map
.end (); ++iter
)
1058 T
*managed_obj
= (*iter
).second
;
1059 log_managed_object
<T
> (logger
, managed_obj
);
1063 /* Dump the number of objects of each class that were managed by this
1065 If SHOW_OBJS is true, also dump the objects themselves. */
1068 region_model_manager::log_stats (logger
*logger
, bool show_objs
) const
1071 logger
->log ("svalue consolidation");
1072 log_uniq_map (logger
, show_objs
, "constant_svalue", m_constants_map
);
1073 log_uniq_map (logger
, show_objs
, "unknown_svalue", m_unknowns_map
);
1075 log_managed_object (logger
, m_unknown_NULL
);
1076 log_uniq_map (logger
, show_objs
, "poisoned_svalue", m_poisoned_values_map
);
1077 log_uniq_map (logger
, show_objs
, "setjmp_svalue", m_setjmp_values_map
);
1078 log_uniq_map (logger
, show_objs
, "initial_svalue", m_initial_values_map
);
1079 log_uniq_map (logger
, show_objs
, "region_svalue", m_pointer_values_map
);
1080 log_uniq_map (logger
, show_objs
, "unaryop_svalue", m_unaryop_values_map
);
1081 log_uniq_map (logger
, show_objs
, "binop_svalue", m_binop_values_map
);
1082 log_uniq_map (logger
, show_objs
, "sub_svalue", m_sub_values_map
);
1083 log_uniq_map (logger
, show_objs
, "unmergeable_svalue",
1084 m_unmergeable_values_map
);
1085 log_uniq_map (logger
, show_objs
, "widening_svalue", m_widening_values_map
);
1086 log_uniq_map (logger
, show_objs
, "compound_svalue", m_compound_values_map
);
1087 log_uniq_map (logger
, show_objs
, "conjured_svalue", m_conjured_values_map
);
1088 logger
->log ("max accepted svalue num_nodes: %i",
1089 m_max_complexity
.m_num_nodes
);
1090 logger
->log ("max accepted svalue max_depth: %i",
1091 m_max_complexity
.m_max_depth
);
1093 logger
->log ("region consolidation");
1094 logger
->log (" next region id: %i", m_next_region_id
);
1095 log_uniq_map (logger
, show_objs
, "function_region", m_fndecls_map
);
1096 log_uniq_map (logger
, show_objs
, "label_region", m_labels_map
);
1097 log_uniq_map (logger
, show_objs
, "decl_region for globals", m_globals_map
);
1098 log_uniq_map (logger
, show_objs
, "field_region", m_field_regions
);
1099 log_uniq_map (logger
, show_objs
, "element_region", m_element_regions
);
1100 log_uniq_map (logger
, show_objs
, "offset_region", m_offset_regions
);
1101 log_uniq_map (logger
, show_objs
, "cast_region", m_cast_regions
);
1102 log_uniq_map (logger
, show_objs
, "frame_region", m_frame_regions
);
1103 log_uniq_map (logger
, show_objs
, "symbolic_region", m_symbolic_regions
);
1104 log_uniq_map (logger
, show_objs
, "string_region", m_string_map
);
1105 logger
->log (" # managed dynamic regions: %i",
1106 m_managed_dynamic_regions
.length ());
1107 m_store_mgr
.log_stats (logger
, show_objs
);
1110 /* Dump the number of objects of each class that were managed by this
1112 If SHOW_OBJS is true, also dump the objects themselves.
1113 This is here so it can use log_uniq_map. */
1116 store_manager::log_stats (logger
*logger
, bool show_objs
) const
1119 log_uniq_map (logger
, show_objs
, "concrete_binding",
1120 m_concrete_binding_key_mgr
);
1121 log_uniq_map (logger
, show_objs
, "symbolic_binding",
1122 m_symbolic_binding_key_mgr
);
1127 #endif /* #if ENABLE_ANALYZER */