1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "basic-block.h"
28 #include "gimple-iterator.h"
29 #include "diagnostic-core.h"
34 #include "stringpool.h"
37 #include "fold-const.h"
38 #include "tree-pretty-print.h"
39 #include "diagnostic-color.h"
40 #include "diagnostic-metadata.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
52 #include "analyzer/supergraph.h"
54 #include "analyzer/region-model.h"
55 #include "analyzer/constraint-manager.h"
56 #include "diagnostic-event-id.h"
57 #include "analyzer/sm.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "analyzer/pending-diagnostic.h"
61 #include "analyzer/analyzer-selftests.h"
62 #include "stor-layout.h"
68 /* Dump T to PP in language-independent form, for debugging/logging/dumping
72 dump_tree (pretty_printer
*pp
, tree t
)
74 dump_generic_node (pp
, t
, 0, TDF_SLIM
, 0);
77 /* Dump T to PP in language-independent form in quotes, for
78 debugging/logging/dumping purposes. */
81 dump_quoted_tree (pretty_printer
*pp
, tree t
)
83 pp_begin_quote (pp
, pp_show_color (pp
));
85 pp_end_quote (pp
, pp_show_color (pp
));
88 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
89 calls within other pp_printf calls.
91 default_tree_printer handles 'T' and some other codes by calling
92 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
93 dump_generic_node calls pp_printf in various places, leading to
96 Ideally pp_printf could be made to be reentrant, but in the meantime
97 this function provides a workaround. */
100 print_quoted_type (pretty_printer
*pp
, tree t
)
102 pp_begin_quote (pp
, pp_show_color (pp
));
103 dump_generic_node (pp
, t
, 0, TDF_SLIM
, 0);
104 pp_end_quote (pp
, pp_show_color (pp
));
107 /* Dump this path_var to PP (which must support %E for trees).
109 Express the stack depth using an "@DEPTH" suffix, so e.g. given
116 - the "i" in "bar" would be "(i @ 0)"
117 - the "j" in "foo" would be "(j @ 1)". */
120 path_var::dump (pretty_printer
*pp
) const
122 if (m_tree
== NULL_TREE
)
123 pp_string (pp
, "NULL");
124 if (CONSTANT_CLASS_P (m_tree
))
125 pp_printf (pp
, "%qE", m_tree
);
127 pp_printf (pp
, "(%qE @ %i)", m_tree
, m_stack_depth
);
130 /* For use in printing a comma-separated list. */
133 dump_separator (pretty_printer
*pp
, bool *is_first
)
136 pp_string (pp
, ", ");
140 /* Concrete subclass of constraint_manager that wires it up to a region_model
141 (whilst allowing the constraint_manager and region_model to be somewhat
143 TODO: revisit this; maybe put the region_model * into the constraint_manager
146 class impl_constraint_manager
: public constraint_manager
149 impl_constraint_manager (region_model
*model
)
150 : constraint_manager (),
154 impl_constraint_manager (const impl_constraint_manager
&other
,
156 : constraint_manager (other
),
160 constraint_manager
*clone (region_model
*model
) const
162 return new impl_constraint_manager (*this, model
);
165 tree
maybe_get_constant (svalue_id sid
) const FINAL OVERRIDE
167 svalue
*svalue
= m_model
->get_svalue (sid
);
168 return svalue
->maybe_get_constant ();
171 svalue_id
get_sid_for_constant (tree cst
) const FINAL OVERRIDE
173 gcc_assert (CONSTANT_CLASS_P (cst
));
174 return m_model
->get_rvalue (cst
, NULL
);
177 int get_num_svalues () const FINAL OVERRIDE
179 return m_model
->get_num_svalues ();
183 region_model
*m_model
;
186 /* class svalue_id. */
188 /* Print this svalue_id to PP. */
191 svalue_id::print (pretty_printer
*pp
) const
194 pp_printf (pp
, "null");
196 pp_printf (pp
, "sv%i", m_idx
);
199 /* Print this svalue_id in .dot format to PP. */
202 svalue_id::dump_node_name_to_pp (pretty_printer
*pp
) const
204 gcc_assert (!null_p ());
205 pp_printf (pp
, "svalue_%i", m_idx
);
208 /* Assert that this object is valid (w.r.t. MODEL). */
211 svalue_id::validate (const region_model
&model
) const
213 gcc_assert (null_p () || m_idx
< (int)model
.get_num_svalues ());
216 /* class region_id. */
218 /* Print this region_id to PP. */
221 region_id::print (pretty_printer
*pp
) const
224 pp_printf (pp
, "null");
226 pp_printf (pp
, "r%i", m_idx
);
229 /* Print this region_id in .dot format to PP. */
232 region_id::dump_node_name_to_pp (pretty_printer
*pp
) const
234 gcc_assert (!null_p ());
235 pp_printf (pp
, "region_%i", m_idx
);
238 /* Assert that this object is valid (w.r.t. MODEL). */
241 region_id::validate (const region_model
&model
) const
243 gcc_assert (null_p () || m_idx
< (int)model
.get_num_regions ());
248 /* id_set<region_id>'s ctor. */
251 id_set
<region_id
>::id_set (const region_model
*model
)
252 : m_bitmap (model
->get_num_regions ())
254 bitmap_clear (m_bitmap
);
257 /* class svalue and its various subclasses. */
261 /* svalue's equality operator. Most of the work is done by the
262 a "compare_fields" implementation on each subclass. */
265 svalue::operator== (const svalue
&other
) const
267 enum svalue_kind this_kind
= get_kind ();
268 enum svalue_kind other_kind
= other
.get_kind ();
269 if (this_kind
!= other_kind
)
272 if (m_type
!= other
.m_type
)
281 const region_svalue
&this_sub
282 = (const region_svalue
&)*this;
283 const region_svalue
&other_sub
284 = (const region_svalue
&)other
;
285 return this_sub
.compare_fields (other_sub
);
290 const constant_svalue
&this_sub
291 = (const constant_svalue
&)*this;
292 const constant_svalue
&other_sub
293 = (const constant_svalue
&)other
;
294 return this_sub
.compare_fields (other_sub
);
299 const unknown_svalue
&this_sub
300 = (const unknown_svalue
&)*this;
301 const unknown_svalue
&other_sub
302 = (const unknown_svalue
&)other
;
303 return this_sub
.compare_fields (other_sub
);
308 const poisoned_svalue
&this_sub
309 = (const poisoned_svalue
&)*this;
310 const poisoned_svalue
&other_sub
311 = (const poisoned_svalue
&)other
;
312 return this_sub
.compare_fields (other_sub
);
317 const setjmp_svalue
&this_sub
318 = (const setjmp_svalue
&)*this;
319 const setjmp_svalue
&other_sub
320 = (const setjmp_svalue
&)other
;
321 return this_sub
.compare_fields (other_sub
);
327 /* Generate a hash value for this svalue. Most of the work is done by the
328 add_to_hash vfunc. */
331 svalue::hash () const
333 inchash::hash hstate
;
335 hstate
.add_int (TYPE_UID (m_type
));
336 add_to_hash (hstate
);
337 return hstate
.end ();
340 /* Print this svalue and its ID to PP. */
343 svalue::print (const region_model
&model
,
345 pretty_printer
*pp
) const
348 pp_string (pp
, ": {");
352 gcc_assert (TYPE_P (m_type
));
353 pp_string (pp
, "type: ");
354 print_quoted_type (pp
, m_type
);
355 pp_string (pp
, ", ");
359 print_details (model
, this_sid
, pp
);
364 /* Dump this svalue in the form of a .dot record to PP. */
367 svalue::dump_dot_to_pp (const region_model
&model
,
369 pretty_printer
*pp
) const
371 this_sid
.dump_node_name_to_pp (pp
);
372 pp_printf (pp
, " [label=\"");
373 pp_write_text_to_stream (pp
);
375 pp_string (pp
, ": {");
376 print (model
, this_sid
, pp
);
377 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
378 pp_string (pp
, "}\"];");
382 /* Base implementation of svalue::remap_region_ids vfunc. */
385 svalue::remap_region_ids (const region_id_map
&)
390 /* Base implementation of svalue::walk_for_canonicalization vfunc. */
393 svalue::walk_for_canonicalization (canonicalization
*) const
398 /* Base implementation of svalue::get_child_sid vfunc. */
401 svalue::get_child_sid (region
*parent ATTRIBUTE_UNUSED
,
404 region_model_context
*ctxt ATTRIBUTE_UNUSED
)
406 svalue
*new_child_value
= clone ();
407 if (child
->get_type ())
408 new_child_value
->m_type
= child
->get_type ();
409 svalue_id new_child_sid
= model
.add_svalue (new_child_value
);
410 return new_child_sid
;
413 /* If this svalue is a constant_svalue, return the underlying tree constant.
414 Otherwise return NULL_TREE. */
417 svalue::maybe_get_constant () const
419 if (const constant_svalue
*cst_sval
= dyn_cast_constant_svalue ())
420 return cst_sval
->get_constant ();
425 /* class region_svalue : public svalue. */
427 /* Compare the fields of this region_svalue with OTHER, returning true
429 For use by svalue::operator==. */
432 region_svalue::compare_fields (const region_svalue
&other
) const
434 return m_rid
== other
.m_rid
;
437 /* Implementation of svalue::add_to_hash vfunc for region_svalue. */
440 region_svalue::add_to_hash (inchash::hash
&hstate
) const
442 inchash::add (m_rid
, hstate
);
445 /* Implementation of svalue::print_details vfunc for region_svalue. */
448 region_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
449 svalue_id this_sid ATTRIBUTE_UNUSED
,
450 pretty_printer
*pp
) const
453 pp_string (pp
, "NULL");
461 /* Implementation of svalue::dump_dot_to_pp for region_svalue. */
464 region_svalue::dump_dot_to_pp (const region_model
&model
,
466 pretty_printer
*pp
) const
468 svalue::dump_dot_to_pp (model
, this_sid
, pp
);
470 /* If non-NULL, add an edge to the pointed-to region. */
471 if (!m_rid
.null_p ())
473 this_sid
.dump_node_name_to_pp (pp
);
474 pp_string (pp
, " -> ");
475 m_rid
.dump_node_name_to_pp (pp
);
481 /* Implementation of svalue::remap_region_ids vfunc for region_svalue. */
484 region_svalue::remap_region_ids (const region_id_map
&map
)
489 /* Merge REGION_SVAL_A and REGION_SVAL_B using MERGER, writing the result
493 region_svalue::merge_values (const region_svalue
®ion_sval_a
,
494 const region_svalue
®ion_sval_b
,
495 svalue_id
*merged_sid
,
497 model_merger
*merger
)
499 region_id a_rid
= region_sval_a
.get_pointee ();
500 region_id b_rid
= region_sval_b
.get_pointee ();
502 /* Both are non-NULL. */
503 gcc_assert (!a_rid
.null_p () && !b_rid
.null_p ());
505 /* Have these ptr-values already been merged? */
508 = merger
->m_map_regions_from_a_to_m
.get_dst_for_src (a_rid
);
510 = merger
->m_map_regions_from_b_to_m
.get_dst_for_src (b_rid
);
512 /* "null_p" here means "we haven't seen this ptr-value before".
513 If we've seen one but not the other, or we have different
514 regions, then the merged ptr has to be "unknown". */
515 if (a_rid_in_m
!= b_rid_in_m
)
517 svalue
*merged_sval
= new unknown_svalue (type
);
518 *merged_sid
= merger
->m_merged_model
->add_svalue (merged_sval
);
522 /* Have we seen this yet? If so, reuse the value. */
523 if (!a_rid_in_m
.null_p ())
526 = merger
->m_merged_model
->get_or_create_ptr_svalue (type
, a_rid_in_m
);
530 /* Otherwise we have A/B regions that haven't been referenced yet. */
532 /* Are the regions the "same", when seen from the tree point-of-view.
533 If so, create a merged pointer to it. */
534 path_var pv_a
= merger
->m_model_a
->get_representative_path_var (a_rid
);
535 path_var pv_b
= merger
->m_model_b
->get_representative_path_var (b_rid
);
539 region_id merged_pointee_rid
540 = merger
->m_merged_model
->get_lvalue (pv_a
, NULL
);
542 = merger
->m_merged_model
->get_or_create_ptr_svalue (type
,
544 merger
->record_regions (a_rid
, b_rid
, merged_pointee_rid
);
548 /* Handle an A/B pair of ptrs that both point at heap regions.
549 If they both have a heap region in the merger model, merge them. */
550 region
*region_a
= merger
->m_model_a
->get_region (a_rid
);
551 region
*region_b
= merger
->m_model_b
->get_region (b_rid
);
552 region_id a_parent_rid
= region_a
->get_parent ();
553 region_id b_parent_rid
= region_b
->get_parent ();
554 region
*parent_region_a
= merger
->m_model_a
->get_region (a_parent_rid
);
555 region
*parent_region_b
= merger
->m_model_b
->get_region (b_parent_rid
);
558 && parent_region_a
->get_kind () == RK_HEAP
559 && parent_region_b
->get_kind () == RK_HEAP
)
561 /* We have an A/B pair of ptrs that both point at heap regions. */
562 /* presumably we want to see if each A/B heap region already
563 has a merged region, and, if so, is it the same one. */
564 // This check is above
566 region_id merged_pointee_rid
567 = merger
->m_merged_model
->add_new_malloc_region ();
569 = merger
->m_merged_model
->get_or_create_ptr_svalue
570 (type
, merged_pointee_rid
);
571 merger
->record_regions (a_rid
, b_rid
, merged_pointee_rid
);
575 /* Two different non-NULL pointers? Merge to unknown. */
576 svalue
*merged_sval
= new unknown_svalue (type
);
577 *merged_sid
= merger
->m_merged_model
->add_svalue (merged_sval
);
581 /* Implementation of svalue::walk_for_canonicalization vfunc for
585 region_svalue::walk_for_canonicalization (canonicalization
*c
) const
590 /* Evaluate the condition LHS OP RHS.
591 Subroutine of region_model::eval_condition for when we have a pair of
595 region_svalue::eval_condition (region_svalue
*lhs
,
599 /* See if they point to the same region. */
600 /* TODO: what about child regions where the child is the first child
602 region_id lhs_rid
= lhs
->get_pointee ();
603 region_id rhs_rid
= rhs
->get_pointee ();
610 if (lhs_rid
== rhs_rid
)
611 return tristate::TS_TRUE
;
613 return tristate::TS_FALSE
;
617 if (lhs_rid
!= rhs_rid
)
618 return tristate::TS_TRUE
;
620 return tristate::TS_FALSE
;
625 if (lhs_rid
== rhs_rid
)
626 return tristate::TS_TRUE
;
631 if (lhs_rid
== rhs_rid
)
632 return tristate::TS_FALSE
;
636 return tristate::TS_UNKNOWN
;
639 /* class constant_svalue : public svalue. */
641 /* Compare the fields of this constant_svalue with OTHER, returning true
643 For use by svalue::operator==. */
646 constant_svalue::compare_fields (const constant_svalue
&other
) const
648 return m_cst_expr
== other
.m_cst_expr
;
651 /* Implementation of svalue::add_to_hash vfunc for constant_svalue. */
654 constant_svalue::add_to_hash (inchash::hash
&hstate
) const
656 inchash::add_expr (m_cst_expr
, hstate
);
659 /* Merge the CST_SVAL_A and CST_SVAL_B using MERGER, writing the id of
660 the resulting svalue into *MERGED_SID. */
663 constant_svalue::merge_values (const constant_svalue
&cst_sval_a
,
664 const constant_svalue
&cst_sval_b
,
665 svalue_id
*merged_sid
,
666 model_merger
*merger
)
668 tree cst_a
= cst_sval_a
.get_constant ();
669 tree cst_b
= cst_sval_b
.get_constant ();
673 /* If they are the same constant, merge as that constant value. */
674 merged_sval
= new constant_svalue (cst_a
);
678 /* Otherwise, we have two different constant values.
679 Merge as an unknown value.
680 TODO: impose constraints on the value?
681 (maybe just based on A, to avoid infinite chains) */
682 merged_sval
= new unknown_svalue (TREE_TYPE (cst_a
));
684 *merged_sid
= merger
->m_merged_model
->add_svalue (merged_sval
);
687 /* Evaluate the condition LHS OP RHS.
688 Subroutine of region_model::eval_condition for when we have a pair of
692 constant_svalue::eval_condition (constant_svalue
*lhs
,
694 constant_svalue
*rhs
)
696 tree lhs_const
= lhs
->get_constant ();
697 tree rhs_const
= rhs
->get_constant ();
699 gcc_assert (CONSTANT_CLASS_P (lhs_const
));
700 gcc_assert (CONSTANT_CLASS_P (rhs_const
));
702 /* Check for comparable types. */
703 if (types_compatible_p (TREE_TYPE (lhs_const
), TREE_TYPE (rhs_const
)))
706 = fold_binary (op
, boolean_type_node
, lhs_const
, rhs_const
);
707 if (comparison
== boolean_true_node
)
708 return tristate (tristate::TS_TRUE
);
709 if (comparison
== boolean_false_node
)
710 return tristate (tristate::TS_FALSE
);
712 return tristate::TS_UNKNOWN
;
715 /* Implementation of svalue::print_details vfunc for constant_svalue. */
718 constant_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
719 svalue_id this_sid ATTRIBUTE_UNUSED
,
720 pretty_printer
*pp
) const
722 pp_printf (pp
, "%qE", m_cst_expr
);
725 /* Implementation of svalue::get_child_sid vfunc for constant_svalue. */
728 constant_svalue::get_child_sid (region
*parent ATTRIBUTE_UNUSED
,
731 region_model_context
*ctxt ATTRIBUTE_UNUSED
)
733 /* TODO: handle the all-zeroes case by returning an all-zeroes of the
736 /* Otherwise, we don't have a good way to get a child value out of a
739 Handle this case by using an unknown value. */
740 svalue
*unknown_sval
= new unknown_svalue (child
->get_type ());
741 return model
.add_svalue (unknown_sval
);
744 /* class unknown_svalue : public svalue. */
746 /* Compare the fields of this unknown_svalue with OTHER, returning true
748 For use by svalue::operator==. */
751 unknown_svalue::compare_fields (const unknown_svalue
&) const
753 /* I *think* we want to return true here, in that when comparing
754 two region models, we want two peer unknown_svalue instances
759 /* Implementation of svalue::add_to_hash vfunc for unknown_svalue. */
762 unknown_svalue::add_to_hash (inchash::hash
&) const
767 /* Implementation of svalue::print_details vfunc for unknown_svalue. */
770 unknown_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
771 svalue_id this_sid ATTRIBUTE_UNUSED
,
772 pretty_printer
*pp
) const
774 pp_string (pp
, "unknown");
777 /* Get a string for KIND for use in debug dumps. */
780 poison_kind_to_str (enum poison_kind kind
)
786 case POISON_KIND_UNINIT
:
788 case POISON_KIND_FREED
:
790 case POISON_KIND_POPPED_STACK
:
791 return "popped stack";
795 /* class poisoned_svalue : public svalue. */
797 /* Compare the fields of this poisoned_svalue with OTHER, returning true
799 For use by svalue::operator==. */
802 poisoned_svalue::compare_fields (const poisoned_svalue
&other
) const
804 return m_kind
== other
.m_kind
;
807 /* Implementation of svalue::add_to_hash vfunc for poisoned_svalue. */
810 poisoned_svalue::add_to_hash (inchash::hash
&hstate
) const
812 hstate
.add_int (m_kind
);
815 /* Implementation of svalue::print_details vfunc for poisoned_svalue. */
818 poisoned_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
819 svalue_id this_sid ATTRIBUTE_UNUSED
,
820 pretty_printer
*pp
) const
822 pp_printf (pp
, "poisoned: %s", poison_kind_to_str (m_kind
));
825 /* class setjmp_svalue's implementation is in engine.cc, so that it can use
826 the declaration of exploded_node. */
828 /* class region and its various subclasses. */
830 /* Get a string for KIND for use in debug dumps. */
833 region_kind_to_str (enum region_kind kind
)
868 /* Equality operator for region.
869 After comparing base class fields and kind, the rest of the
870 comparison is handled off to a "compare_fields" member function
871 specific to the appropriate subclass. */
874 region::operator== (const region
&other
) const
876 if (m_parent_rid
!= other
.m_parent_rid
)
878 if (m_sval_id
!= other
.m_sval_id
)
880 if (m_type
!= other
.m_type
)
883 enum region_kind this_kind
= get_kind ();
884 enum region_kind other_kind
= other
.get_kind ();
885 if (this_kind
!= other_kind
)
889 if (m_view_rids
.length () != other
.m_view_rids
.length ())
893 FOR_EACH_VEC_ELT (m_view_rids
, i
, rid
)
894 if (! (*rid
== other
.m_view_rids
[i
]))
906 const primitive_region
&this_sub
907 = (const primitive_region
&)*this;
908 const primitive_region
&other_sub
909 = (const primitive_region
&)other
;
910 return this_sub
.compare_fields (other_sub
);
915 const struct_region
&this_sub
916 = (const struct_region
&)*this;
917 const struct_region
&other_sub
918 = (const struct_region
&)other
;
919 return this_sub
.compare_fields (other_sub
);
923 const union_region
&this_sub
924 = (const union_region
&)*this;
925 const union_region
&other_sub
926 = (const union_region
&)other
;
927 return this_sub
.compare_fields (other_sub
);
931 const array_region
&this_sub
932 = (const array_region
&)*this;
933 const array_region
&other_sub
934 = (const array_region
&)other
;
935 return this_sub
.compare_fields (other_sub
);
939 const frame_region
&this_sub
940 = (const frame_region
&)*this;
941 const frame_region
&other_sub
942 = (const frame_region
&)other
;
943 return this_sub
.compare_fields (other_sub
);
947 const globals_region
&this_sub
948 = (const globals_region
&)*this;
949 const globals_region
&other_sub
950 = (const globals_region
&)other
;
951 return this_sub
.compare_fields (other_sub
);
955 const code_region
&this_sub
956 = (const code_region
&)*this;
957 const code_region
&other_sub
958 = (const code_region
&)other
;
959 return this_sub
.compare_fields (other_sub
);
963 const function_region
&this_sub
964 = (const function_region
&)*this;
965 const function_region
&other_sub
966 = (const function_region
&)other
;
967 return this_sub
.compare_fields (other_sub
);
971 const stack_region
&this_sub
972 = (const stack_region
&)*this;
973 const stack_region
&other_sub
974 = (const stack_region
&)other
;
975 return this_sub
.compare_fields (other_sub
);
979 const root_region
&this_sub
980 = (const root_region
&)*this;
981 const root_region
&other_sub
982 = (const root_region
&)other
;
983 return this_sub
.compare_fields (other_sub
);
987 const symbolic_region
&this_sub
988 = (const symbolic_region
&)*this;
989 const symbolic_region
&other_sub
990 = (const symbolic_region
&)other
;
991 return this_sub
.compare_fields (other_sub
);
995 const heap_region
&this_sub
996 = (const heap_region
&)*this;
997 const heap_region
&other_sub
998 = (const heap_region
&)other
;
999 return this_sub
.compare_fields (other_sub
);
1004 /* Get the parent region of this region. */
1007 region::get_parent_region (const region_model
&model
) const
1009 return model
.get_region (m_parent_rid
);
1012 /* Set this region's value to RHS_SID (or potentially a variant of it,
1013 for some kinds of casts). */
1016 region::set_value (region_model
&model
, region_id this_rid
, svalue_id rhs_sid
,
1017 region_model_context
*ctxt
)
1019 /* Handle some kinds of casting. */
1022 svalue
*sval
= model
.get_svalue (rhs_sid
);
1023 if (sval
->get_type ())
1024 rhs_sid
= model
.maybe_cast (m_type
, rhs_sid
, ctxt
);
1026 sval
= model
.get_svalue (rhs_sid
);
1027 if (sval
->get_type ())
1028 gcc_assert (m_type
== sval
->get_type ());
1031 m_sval_id
= rhs_sid
;
1034 If this is a view, it becomes its parent's active view.
1035 If there was already an active views, invalidate its value; otherwise
1036 if the parent itself had a value, invalidate it.
1037 If it's not a view, then deactivate any view that is active on this
1041 become_active_view (model
, this_rid
);
1044 deactivate_any_active_view (model
);
1045 gcc_assert (m_active_view_rid
.null_p ());
1050 /* Make this region (with id THIS_RID) the "active" view of its parent.
1051 Any other active view has its value set to "unknown" and descendent values
1053 If there wasn't an active view, then set the parent's value to unknown, and
1054 clear its descendent values (apart from this view). */
1057 region::become_active_view (region_model
&model
, region_id this_rid
)
1059 gcc_assert (m_is_view
);
1061 region
*parent_reg
= model
.get_region (m_parent_rid
);
1062 gcc_assert (parent_reg
);
1064 region_id old_active_view_rid
= parent_reg
->m_active_view_rid
;
1066 if (old_active_view_rid
== this_rid
)
1068 /* Already the active view: do nothing. */
1072 /* We have a change of active view. */
1073 parent_reg
->m_active_view_rid
= this_rid
;
1075 if (old_active_view_rid
.null_p ())
1077 /* No previous active view, but the parent and its other children
1079 If so, invalidate those values - but not that of the new view. */
1080 region_id_set
below_region (&model
);
1081 model
.get_descendents (m_parent_rid
, &below_region
, this_rid
);
1082 for (unsigned i
= 0; i
< model
.get_num_regions (); i
++)
1084 region_id
rid (region_id::from_int (i
));
1085 if (below_region
.region_p (rid
))
1087 region
*other_reg
= model
.get_region (rid
);
1088 other_reg
->m_sval_id
= svalue_id::null ();
1091 region
*parent
= model
.get_region (m_parent_rid
);
1093 = model
.add_svalue (new unknown_svalue (parent
->get_type ()));
1097 /* If there was an active view, invalidate it. */
1098 region
*old_active_view
= model
.get_region (old_active_view_rid
);
1099 old_active_view
->deactivate_view (model
, old_active_view_rid
);
1103 /* If this region (with id THIS_RID) has an active view, deactivate it,
1104 clearing m_active_view_rid. */
1107 region::deactivate_any_active_view (region_model
&model
)
1109 if (m_active_view_rid
.null_p ())
1111 region
*view
= model
.get_region (m_active_view_rid
);
1112 view
->deactivate_view (model
, m_active_view_rid
);
1113 m_active_view_rid
= region_id::null ();
1116 /* Clear any values for regions below THIS_RID.
1117 Set the view's value to unknown. */
1120 region::deactivate_view (region_model
&model
, region_id this_view_rid
)
1122 gcc_assert (is_view_p ());
1124 /* Purge values from old_active_this_view_rid and all its
1125 descendents. Potentially we could use a poison value
1126 for this, but let's use unknown for now. */
1127 region_id_set
below_view (&model
);
1128 model
.get_descendents (this_view_rid
, &below_view
, region_id::null ());
1130 for (unsigned i
= 0; i
< model
.get_num_regions (); i
++)
1132 region_id
rid (region_id::from_int (i
));
1133 if (below_view
.region_p (rid
))
1135 region
*other_reg
= model
.get_region (rid
);
1136 other_reg
->m_sval_id
= svalue_id::null ();
1140 m_sval_id
= model
.add_svalue (new unknown_svalue (get_type ()));
1143 /* Get a value for this region, either its value if it has one,
1144 or, failing that, "inherit" a value from first ancestor with a
1147 For example, when getting the value for a local variable within
1148 a stack frame that doesn't have one, the frame doesn't have a value
1149 either, but the stack as a whole will have an "uninitialized" poison
1150 value, so inherit that. */
1153 region::get_value (region_model
&model
, bool non_null
,
1154 region_model_context
*ctxt
)
1156 /* If this region has a value, use it. */
1157 if (!m_sval_id
.null_p ())
1160 /* Otherwise, "inherit" value from first ancestor with a
1163 region
*parent
= model
.get_region (m_parent_rid
);
1166 svalue_id inherited_sid
1167 = parent
->get_inherited_child_sid (this, model
, ctxt
);
1168 if (!inherited_sid
.null_p ())
1169 return inherited_sid
;
1172 /* If a non-null value has been requested, then generate
1173 a new unknown value. Store it, so that repeated reads from this
1174 region will yield the same unknown value. */
1177 svalue_id unknown_sid
= model
.add_svalue (new unknown_svalue (m_type
));
1178 m_sval_id
= unknown_sid
;
1182 return svalue_id::null ();
1185 /* Get a value for CHILD, inheriting from this region.
1187 Recurse, so this region will inherit a value if it doesn't already
1191 region::get_inherited_child_sid (region
*child
,
1192 region_model
&model
,
1193 region_model_context
*ctxt
)
1195 if (m_sval_id
.null_p ())
1198 if (!m_parent_rid
.null_p ())
1200 region
*parent
= model
.get_region (m_parent_rid
);
1201 m_sval_id
= parent
->get_inherited_child_sid (this, model
, ctxt
);
1205 if (!m_sval_id
.null_p ())
1207 /* Clone the parent's value, so that attempts to update it
1208 (e.g giving a specific value to an inherited "uninitialized"
1209 value) touch the child, and not the parent. */
1210 svalue
*this_value
= model
.get_svalue (m_sval_id
);
1211 svalue_id new_child_sid
1212 = this_value
->get_child_sid (this, child
, model
, ctxt
);
1214 ctxt
->on_inherited_svalue (m_sval_id
, new_child_sid
);
1215 child
->m_sval_id
= new_child_sid
;
1216 return new_child_sid
;
1219 return svalue_id::null ();
1222 /* Generate a hash value for this region. The work is done by the
1223 add_to_hash vfunc. */
1226 region::hash () const
1228 inchash::hash hstate
;
1229 add_to_hash (hstate
);
1230 return hstate
.end ();
1233 /* Print a one-liner representation of this region to PP, assuming
1234 that this region is within MODEL and its id is THIS_RID. */
1237 region::print (const region_model
&model
,
1239 pretty_printer
*pp
) const
1241 this_rid
.print (pp
);
1242 pp_string (pp
, ": {");
1245 print_fields (model
, this_rid
, pp
);
1247 pp_string (pp
, "}");
1250 /* Base class implementation of region::dump_dot_to_pp vfunc. */
1253 region::dump_dot_to_pp (const region_model
&model
,
1255 pretty_printer
*pp
) const
1257 this_rid
.dump_node_name_to_pp (pp
);
1258 pp_printf (pp
, " [shape=none,margin=0,style=filled,fillcolor=%s,label=\"",
1260 pp_write_text_to_stream (pp
);
1261 print (model
, this_rid
, pp
);
1262 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
1263 pp_string (pp
, "\"];");
1266 /* Add edge to svalue. */
1267 if (!m_sval_id
.null_p ())
1269 this_rid
.dump_node_name_to_pp (pp
);
1270 pp_string (pp
, " -> ");
1271 m_sval_id
.dump_node_name_to_pp (pp
);
1272 pp_string (pp
, ";");
1276 /* Add edge to parent. */
1277 if (!m_parent_rid
.null_p ())
1279 this_rid
.dump_node_name_to_pp (pp
);
1280 pp_string (pp
, " -> ");
1281 m_parent_rid
.dump_node_name_to_pp (pp
);
1282 pp_string (pp
, ";");
1287 /* Dump a tree-like ASCII-art representation of this region to PP. */
1290 region::dump_to_pp (const region_model
&model
,
1294 bool is_last_child
) const
1296 print (model
, this_rid
, pp
);
1299 const char *new_prefix
;
1300 if (!m_parent_rid
.null_p ())
1301 new_prefix
= ACONCAT ((prefix
, is_last_child
? " " : "| ", NULL
));
1303 new_prefix
= prefix
;
1305 const char *begin_color
= colorize_start (pp_show_color (pp
), "note");
1306 const char *end_color
= colorize_stop (pp_show_color (pp
));
1308 = ACONCAT ((begin_color
, new_prefix
, "|:", end_color
, NULL
));
1310 if (!m_sval_id
.null_p ())
1312 pp_printf (pp
, "%s sval: ", field_prefix
);
1313 model
.get_svalue (m_sval_id
)->print (model
, m_sval_id
, pp
);
1318 pp_printf (pp
, "%s type: ", field_prefix
);
1319 print_quoted_type (pp
, m_type
);
1323 /* Find the children. */
1325 auto_vec
<region_id
> child_rids
;
1327 for (unsigned i
= 0; i
< model
.get_num_regions (); ++i
)
1329 region_id rid
= region_id::from_int (i
);
1330 region
*child
= model
.get_region (rid
);
1331 if (child
->m_parent_rid
== this_rid
)
1332 child_rids
.safe_push (rid
);
1335 /* Print the children, using dump_child_label to label them. */
1337 region_id
*child_rid
;
1338 FOR_EACH_VEC_ELT (child_rids
, i
, child_rid
)
1340 is_last_child
= (i
== child_rids
.length () - 1);
1341 if (!this_rid
.null_p ())
1343 const char *tail
= is_last_child
? "`-" : "|-";
1344 pp_printf (pp
, "%r%s%s%R", "note", new_prefix
, tail
);
1346 dump_child_label (model
, this_rid
, *child_rid
, pp
);
1347 model
.get_region (*child_rid
)->dump_to_pp (model
, *child_rid
, pp
,
1353 /* Base implementation of region::dump_child_label vfunc. */
1356 region::dump_child_label (const region_model
&model
,
1357 region_id this_rid ATTRIBUTE_UNUSED
,
1358 region_id child_rid
,
1359 pretty_printer
*pp
) const
1361 region
*child
= model
.get_region (child_rid
);
1362 if (child
->m_is_view
)
1364 gcc_assert (TYPE_P (child
->get_type ()));
1365 if (m_active_view_rid
== child_rid
)
1366 pp_string (pp
, "active ");
1368 pp_string (pp
, "inactive ");
1369 pp_string (pp
, "view as ");
1370 print_quoted_type (pp
, child
->get_type ());
1371 pp_string (pp
, ": ");
1375 /* Base implementation of region::validate vfunc.
1376 Assert that the fields of "region" are valid; subclasses should
1377 chain up their implementation to this one. */
1380 region::validate (const region_model
&model
) const
1382 m_parent_rid
.validate (model
);
1383 m_sval_id
.validate (model
);
1385 region_id
*view_rid
;
1386 FOR_EACH_VEC_ELT (m_view_rids
, i
, view_rid
)
1388 gcc_assert (!view_rid
->null_p ());
1389 view_rid
->validate (model
);
1391 m_active_view_rid
.validate (model
);
1394 /* Apply MAP to svalue_ids to this region. This updates the value
1395 for the region (if any). */
1398 region::remap_svalue_ids (const svalue_id_map
&map
)
1400 map
.update (&m_sval_id
);
1403 /* Base implementation of region::remap_region_ids vfunc; subclasses should
1404 chain up to this, updating any region_id data. */
1407 region::remap_region_ids (const region_id_map
&map
)
1409 map
.update (&m_parent_rid
);
1411 region_id
*view_rid
;
1412 FOR_EACH_VEC_ELT (m_view_rids
, i
, view_rid
)
1413 map
.update (view_rid
);
1414 map
.update (&m_active_view_rid
);
1417 /* Add a new region with id VIEW_RID as a view of this region. */
1420 region::add_view (region_id view_rid
, region_model
*model
)
1422 gcc_assert (!view_rid
.null_p ());
1423 region
*new_view
= model
->get_region (view_rid
);
1424 new_view
->m_is_view
= true;
1425 gcc_assert (!new_view
->m_parent_rid
.null_p ());
1426 gcc_assert (new_view
->m_sval_id
.null_p ());
1428 //gcc_assert (new_view->get_type () != NULL_TREE);
1429 // TODO: this can sometimes be NULL, when viewing through a (void *)
1431 // TODO: the type ought to not be present yet
1433 m_view_rids
.safe_push (view_rid
);
1436 /* Look for a view of type TYPE of this region, returning its id if found,
1437 or null otherwise. */
1440 region::get_view (tree type
, region_model
*model
) const
1443 region_id
*view_rid
;
1444 FOR_EACH_VEC_ELT (m_view_rids
, i
, view_rid
)
1446 region
*view
= model
->get_region (*view_rid
);
1447 gcc_assert (view
->m_is_view
);
1448 if (view
->get_type () == type
)
1451 return region_id::null ();
1454 /* region's ctor. */
1456 region::region (region_id parent_rid
, svalue_id sval_id
, tree type
)
1457 : m_parent_rid (parent_rid
), m_sval_id (sval_id
), m_type (type
),
1458 m_view_rids (), m_is_view (false), m_active_view_rid (region_id::null ())
1460 gcc_assert (type
== NULL_TREE
|| TYPE_P (type
));
1463 /* region's copy ctor. */
1465 region::region (const region
&other
)
1466 : m_parent_rid (other
.m_parent_rid
), m_sval_id (other
.m_sval_id
),
1467 m_type (other
.m_type
), m_view_rids (other
.m_view_rids
.length ()),
1468 m_is_view (other
.m_is_view
), m_active_view_rid (other
.m_active_view_rid
)
1472 FOR_EACH_VEC_ELT (other
.m_view_rids
, i
, rid
)
1473 m_view_rids
.quick_push (*rid
);
1476 /* Base implementation of region::add_to_hash vfunc; subclasses should
1477 chain up to this. */
1480 region::add_to_hash (inchash::hash
&hstate
) const
1482 inchash::add (m_parent_rid
, hstate
);
1483 inchash::add (m_sval_id
, hstate
);
1484 hstate
.add_ptr (m_type
);
1488 /* Base implementation of region::print_fields vfunc. */
1491 region::print_fields (const region_model
&model ATTRIBUTE_UNUSED
,
1492 region_id this_rid ATTRIBUTE_UNUSED
,
1493 pretty_printer
*pp
) const
1495 pp_printf (pp
, "kind: %qs", region_kind_to_str (get_kind ()));
1497 pp_string (pp
, ", parent: ");
1498 m_parent_rid
.print (pp
);
1500 pp_printf (pp
, ", sval: ");
1501 m_sval_id
.print (pp
);
1505 pp_printf (pp
, ", type: ");
1506 print_quoted_type (pp
, m_type
);
1510 /* Determine if a pointer to this region must be non-NULL.
1512 Generally, pointers to regions must be non-NULL, but pointers
1513 to symbolic_regions might, in fact, be NULL.
1515 This allows us to simulate functions like malloc and calloc with:
1516 - only one "outcome" from each statement,
1517 - the idea that the pointer is on the heap if non-NULL
1518 - the possibility that the pointer could be NULL
1519 - the idea that successive values returned from malloc are non-equal
1520 - to be able to zero-fill for calloc. */
1523 region::non_null_p (const region_model
&model
) const
1525 /* Look through views to get at the underlying region. */
1527 return model
.get_region (m_parent_rid
)->non_null_p (model
);
1529 /* Are we within a symbolic_region? If so, it could be NULL. */
1530 if (const symbolic_region
*sym_reg
= dyn_cast_symbolic_region ())
1532 if (sym_reg
->m_possibly_null
)
1539 /* class primitive_region : public region. */
1541 /* Implementation of region::clone vfunc for primitive_region. */
1544 primitive_region::clone () const
1546 return new primitive_region (*this);
1549 /* Implementation of region::walk_for_canonicalization vfunc for
1550 primitive_region. */
1553 primitive_region::walk_for_canonicalization (canonicalization
*) const
1558 /* class map_region : public region. */
1560 /* map_region's copy ctor. */
1562 map_region::map_region (const map_region
&other
)
1568 /* Compare the fields of this map_region with OTHER, returning true
1570 For use by region::operator==. */
1573 map_region::compare_fields (const map_region
&other
) const
1575 if (m_map
.elements () != other
.m_map
.elements ())
1578 for (map_t::iterator iter
= m_map
.begin ();
1579 iter
!= m_map
.end ();
1582 tree key
= (*iter
).first
;
1583 region_id e
= (*iter
).second
;
1584 region_id
*other_slot
= const_cast <map_t
&> (other
.m_map
).get (key
);
1585 if (other_slot
== NULL
)
1587 if (e
!= *other_slot
)
1593 /* Implementation of region::print_fields vfunc for map_region. */
1596 map_region::print_fields (const region_model
&model
,
1598 pretty_printer
*pp
) const
1600 region::print_fields (model
, this_rid
, pp
);
1601 pp_string (pp
, ", map: {");
1602 for (map_t::iterator iter
= m_map
.begin ();
1603 iter
!= m_map
.end ();
1606 if (iter
!= m_map
.begin ())
1607 pp_string (pp
, ", ");
1608 tree expr
= (*iter
).first
;
1609 region_id child_rid
= (*iter
).second
;
1610 dump_quoted_tree (pp
, expr
);
1611 pp_string (pp
, ": ");
1612 child_rid
.print (pp
);
1614 pp_string (pp
, "}");
1617 /* Implementation of region::validate vfunc for map_region. */
1620 map_region::validate (const region_model
&model
) const
1622 region::validate (model
);
1623 for (map_t::iterator iter
= m_map
.begin ();
1624 iter
!= m_map
.end ();
1627 region_id child_rid
= (*iter
).second
;
1628 child_rid
.validate (model
);
1632 /* Implementation of region::dump_dot_to_pp vfunc for map_region. */
1635 map_region::dump_dot_to_pp (const region_model
&model
,
1637 pretty_printer
*pp
) const
1639 region::dump_dot_to_pp (model
, this_rid
, pp
);
1640 for (map_t::iterator iter
= m_map
.begin ();
1641 iter
!= m_map
.end ();
1644 // TODO: add nodes/edges to label things
1646 tree expr
= (*iter
).first
;
1647 region_id child_rid
= (*iter
).second
;
1649 pp_printf (pp
, "rid_label_%i [label=\"", child_rid
.as_int ());
1650 pp_write_text_to_stream (pp
);
1651 pp_printf (pp
, "%qE", expr
);
1652 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
1653 pp_string (pp
, "\"];");
1656 pp_printf (pp
, "rid_label_%i", child_rid
.as_int ());
1657 pp_string (pp
, " -> ");
1658 child_rid
.dump_node_name_to_pp (pp
);
1659 pp_string (pp
, ";");
1664 /* Implementation of region::dump_child_label vfunc for map_region. */
1667 map_region::dump_child_label (const region_model
&model
,
1669 region_id child_rid
,
1670 pretty_printer
*pp
) const
1672 region::dump_child_label (model
, this_rid
, child_rid
, pp
);
1674 for (map_t::iterator iter
= m_map
.begin ();
1675 iter
!= m_map
.end ();
1678 if (child_rid
== (*iter
).second
)
1680 tree key
= (*iter
).first
;
1681 dump_quoted_tree (pp
, key
);
1682 pp_string (pp
, ": ");
1687 /* Look for a child region for KEY within this map_region.
1688 If it doesn't already exist, create a child map_region, using TYPE for
1690 Return the region_id of the child (whether pre-existing, or
1692 Notify CTXT if we don't know how to handle TYPE. */
1695 map_region::get_or_create (region_model
*model
,
1699 region_model_context
*ctxt
)
1702 gcc_assert (valid_key_p (key
));
1703 region_id
*slot
= m_map
.get (key
);
1706 region_id child_rid
= model
->add_region_for_type (this_rid
, type
, ctxt
);
1707 m_map
.put (key
, child_rid
);
1711 /* Get the region_id for the child region for KEY within this
1712 MAP_REGION, or NULL if there is no such child region. */
1715 map_region::get (tree key
)
1718 gcc_assert (valid_key_p (key
));
1719 region_id
*slot
= m_map
.get (key
);
1723 /* Implementation of region::add_to_hash vfunc for map_region. */
1726 map_region::add_to_hash (inchash::hash
&hstate
) const
1728 region::add_to_hash (hstate
);
1732 /* Implementation of region::remap_region_ids vfunc for map_region. */
1735 map_region::remap_region_ids (const region_id_map
&map
)
1737 region::remap_region_ids (map
);
1739 /* Remap the region ids within the map entries. */
1740 for (map_t::iterator iter
= m_map
.begin ();
1741 iter
!= m_map
.end (); ++iter
)
1742 map
.update (&(*iter
).second
);
1745 /* Remove the binding of KEY to its child region (but not the
1746 child region itself).
1747 For use when purging unneeded SSA names. */
1750 map_region::unbind (tree key
)
1753 gcc_assert (valid_key_p (key
));
1757 /* Look for a child region with id CHILD_RID within this map_region.
1758 If one is found, return its tree key, otherwise return NULL_TREE. */
1761 map_region::get_tree_for_child_region (region_id child_rid
) const
1763 // TODO: do we want to store an inverse map?
1764 for (map_t::iterator iter
= m_map
.begin ();
1765 iter
!= m_map
.end ();
1768 tree key
= (*iter
).first
;
1769 region_id r
= (*iter
).second
;
1777 /* Look for a child region CHILD within this map_region.
1778 If one is found, return its tree key, otherwise return NULL_TREE. */
1781 map_region::get_tree_for_child_region (region
*child
,
1782 const region_model
&model
) const
1784 // TODO: do we want to store an inverse map?
1785 for (map_t::iterator iter
= m_map
.begin ();
1786 iter
!= m_map
.end ();
1789 tree key
= (*iter
).first
;
1790 region_id r
= (*iter
).second
;
1791 if (model
.get_region (r
) == child
)
1798 /* Comparator for trees to impose a deterministic ordering on
1802 tree_cmp (const_tree t1
, const_tree t2
)
1807 /* Test tree codes first. */
1808 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1809 return TREE_CODE (t1
) - TREE_CODE (t2
);
1811 /* From this point on, we know T1 and T2 have the same tree code. */
1815 if (DECL_NAME (t1
) && DECL_NAME (t2
))
1816 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1
)),
1817 IDENTIFIER_POINTER (DECL_NAME (t2
)));
1822 else if (DECL_NAME (t2
))
1825 return DECL_UID (t1
) - DECL_UID (t2
);
1829 switch (TREE_CODE (t1
))
1833 if (SSA_NAME_VAR (t1
) && SSA_NAME_VAR (t2
))
1835 int var_cmp
= tree_cmp (SSA_NAME_VAR (t1
), SSA_NAME_VAR (t2
));
1838 return SSA_NAME_VERSION (t1
) - SSA_NAME_VERSION (t2
);
1842 if (SSA_NAME_VAR (t1
))
1844 else if (SSA_NAME_VAR (t2
))
1847 return SSA_NAME_VERSION (t1
) - SSA_NAME_VERSION (t2
);
1853 return tree_int_cst_compare (t1
, t2
);
1857 const real_value
*rv1
= TREE_REAL_CST_PTR (t1
);
1858 const real_value
*rv2
= TREE_REAL_CST_PTR (t2
);
1859 if (real_compare (UNORDERED_EXPR
, rv1
, rv2
))
1861 /* Impose an arbitrary order on NaNs relative to other NaNs
1863 if (int cmp_isnan
= real_isnan (rv1
) - real_isnan (rv2
))
1865 if (int cmp_issignaling_nan
1866 = real_issignaling_nan (rv1
) - real_issignaling_nan (rv2
))
1867 return cmp_issignaling_nan
;
1868 return real_isneg (rv1
) - real_isneg (rv2
);
1870 if (real_compare (LT_EXPR
, rv1
, rv2
))
1872 if (real_compare (GT_EXPR
, rv1
, rv2
))
1878 return strcmp (TREE_STRING_POINTER (t1
),
1879 TREE_STRING_POINTER (t2
));
1891 /* qsort comparator for trees to impose a deterministic ordering on
1895 tree_cmp (const void *p1
, const void *p2
)
1897 const_tree t1
= *(const_tree
const *)p1
;
1898 const_tree t2
= *(const_tree
const *)p2
;
1900 return tree_cmp (t1
, t2
);
1903 /* Attempt to merge MAP_REGION_A and MAP_REGION_B into MERGED_MAP_REGION,
1904 which has region_id MERGED_RID, using MERGER.
1905 Return true if the merger is possible, false otherwise. */
1908 map_region::can_merge_p (const map_region
*map_region_a
,
1909 const map_region
*map_region_b
,
1910 map_region
*merged_map_region
,
1911 region_id merged_rid
,
1912 model_merger
*merger
)
1914 for (map_t::iterator iter
= map_region_a
->m_map
.begin ();
1915 iter
!= map_region_a
->m_map
.end ();
1918 tree key_a
= (*iter
).first
;
1919 region_id rid_a
= (*iter
).second
;
1921 if (const region_id
*slot_b
1922 = const_cast<map_region
*>(map_region_b
)->m_map
.get (key_a
))
1924 region_id rid_b
= *slot_b
;
1926 region
*child_region_a
= merger
->get_region_a
<region
> (rid_a
);
1927 region
*child_region_b
= merger
->get_region_b
<region
> (rid_b
);
1929 gcc_assert (child_region_a
->get_type ()
1930 == child_region_b
->get_type ());
1932 gcc_assert (child_region_a
->get_kind ()
1933 == child_region_b
->get_kind ());
1935 region_id child_merged_rid
1936 = merged_map_region
->get_or_create (merger
->m_merged_model
,
1939 child_region_a
->get_type (),
1942 region
*child_merged_region
1943 = merger
->m_merged_model
->get_region (child_merged_rid
);
1945 /* Consider values. */
1946 svalue_id child_a_sid
= child_region_a
->get_value_direct ();
1947 svalue_id child_b_sid
= child_region_b
->get_value_direct ();
1948 svalue_id child_merged_sid
;
1949 if (!merger
->can_merge_values_p (child_a_sid
, child_b_sid
,
1952 if (!child_merged_sid
.null_p ())
1953 child_merged_region
->set_value (*merger
->m_merged_model
,
1958 if (map_region
*map_region_a
= child_region_a
->dyn_cast_map_region ())
1961 if (!can_merge_p (map_region_a
,
1962 as_a
<map_region
*> (child_region_b
),
1963 as_a
<map_region
*> (child_merged_region
),
1972 /* TODO: region is present in A, but absent in B. */
1976 /* TODO: check for keys in B that aren't in A. */
1982 /* Implementation of region::walk_for_canonicalization vfunc for
1986 map_region::walk_for_canonicalization (canonicalization
*c
) const
1988 auto_vec
<tree
> keys (m_map
.elements ());
1989 for (map_t::iterator iter
= m_map
.begin ();
1990 iter
!= m_map
.end ();
1993 tree key_a
= (*iter
).first
;
1994 keys
.quick_push (key_a
);
1996 keys
.qsort (tree_cmp
);
2000 FOR_EACH_VEC_ELT (keys
, i
, key
)
2002 region_id rid
= *const_cast<map_region
*>(this)->m_map
.get (key
);
2007 /* For debugging purposes: look for a child region for a decl named
2008 IDENTIFIER (or an SSA_NAME for such a decl), returning its value,
2009 or svalue_id::null if none are found. */
2012 map_region::get_value_by_name (tree identifier
,
2013 const region_model
&model
) const
2015 for (map_t::iterator iter
= m_map
.begin ();
2016 iter
!= m_map
.end ();
2019 tree key
= (*iter
).first
;
2020 if (TREE_CODE (key
) == SSA_NAME
)
2021 if (SSA_NAME_VAR (key
))
2022 key
= SSA_NAME_VAR (key
);
2024 if (DECL_NAME (key
) == identifier
)
2026 region_id rid
= (*iter
).second
;
2027 region
*region
= model
.get_region (rid
);
2028 return region
->get_value (const_cast<region_model
&>(model
),
2032 return svalue_id::null ();
2035 /* class struct_or_union_region : public map_region. */
2037 /* Implementation of map_region::valid_key_p vfunc for
2038 struct_or_union_region. */
2041 struct_or_union_region::valid_key_p (tree key
) const
2043 return TREE_CODE (key
) == FIELD_DECL
;
2046 /* Compare the fields of this struct_or_union_region with OTHER, returning
2047 true if they are equal.
2048 For use by region::operator==. */
2051 struct_or_union_region::compare_fields (const struct_or_union_region
&other
)
2054 return map_region::compare_fields (other
);
2057 /* class struct_region : public struct_or_union_region. */
2059 /* Implementation of region::clone vfunc for struct_region. */
2062 struct_region::clone () const
2064 return new struct_region (*this);
2067 /* Compare the fields of this struct_region with OTHER, returning true
2069 For use by region::operator==. */
2072 struct_region::compare_fields (const struct_region
&other
) const
2074 return struct_or_union_region::compare_fields (other
);
2077 /* class union_region : public struct_or_union_region. */
2079 /* Implementation of region::clone vfunc for union_region. */
2082 union_region::clone () const
2084 return new union_region (*this);
2087 /* Compare the fields of this union_region with OTHER, returning true
2089 For use by region::operator==. */
2092 union_region::compare_fields (const union_region
&other
) const
2094 return struct_or_union_region::compare_fields (other
);
2097 /* class frame_region : public map_region. */
2099 /* Compare the fields of this frame_region with OTHER, returning true
2101 For use by region::operator==. */
2104 frame_region::compare_fields (const frame_region
&other
) const
2106 if (!map_region::compare_fields (other
))
2108 if (m_fun
!= other
.m_fun
)
2110 if (m_depth
!= other
.m_depth
)
2115 /* Implementation of region::clone vfunc for frame_region. */
2118 frame_region::clone () const
2120 return new frame_region (*this);
2123 /* Implementation of map_region::valid_key_p vfunc for frame_region. */
2126 frame_region::valid_key_p (tree key
) const
2128 // TODO: could also check that VAR_DECLs are locals
2129 return (TREE_CODE (key
) == PARM_DECL
2130 || TREE_CODE (key
) == VAR_DECL
2131 || TREE_CODE (key
) == SSA_NAME
2132 || TREE_CODE (key
) == RESULT_DECL
);
2135 /* Implementation of region::print_fields vfunc for frame_region. */
2138 frame_region::print_fields (const region_model
&model
,
2140 pretty_printer
*pp
) const
2142 map_region::print_fields (model
, this_rid
, pp
);
2143 pp_printf (pp
, ", function: %qs, depth: %i", function_name (m_fun
), m_depth
);
2146 /* Implementation of region::add_to_hash vfunc for frame_region. */
2149 frame_region::add_to_hash (inchash::hash
&hstate
) const
2151 map_region::add_to_hash (hstate
);
2152 hstate
.add_ptr (m_fun
);
2153 hstate
.add_int (m_depth
);
2156 /* class globals_region : public scope_region. */
2158 /* Compare the fields of this globals_region with OTHER, returning true
2160 For use by region::operator==. */
2163 globals_region::compare_fields (const globals_region
&other
) const
2165 return map_region::compare_fields (other
);
2168 /* Implementation of region::clone vfunc for globals_region. */
2171 globals_region::clone () const
2173 return new globals_region (*this);
2176 /* Implementation of map_region::valid_key_p vfunc for globals_region. */
2179 globals_region::valid_key_p (tree key
) const
2181 return TREE_CODE (key
) == VAR_DECL
;
2184 /* class code_region : public map_region. */
2186 /* Compare the fields of this code_region with OTHER, returning true
2188 For use by region::operator==. */
2191 code_region::compare_fields (const code_region
&other
) const
2193 return map_region::compare_fields (other
);
2196 /* Implementation of region::clone vfunc for code_region. */
2199 code_region::clone () const
2201 return new code_region (*this);
2204 /* Implementation of map_region::valid_key_p vfunc for code_region. */
2207 code_region::valid_key_p (tree key
) const
2209 return TREE_CODE (key
) == FUNCTION_DECL
;
2212 /* class array_region : public region. */
2214 /* array_region's copy ctor. */
2216 array_region::array_region (const array_region
&other
)
2222 /* Get a child region for the element with index INDEX_SID. */
2225 array_region::get_element (region_model
*model
,
2227 svalue_id index_sid
,
2228 region_model_context
*ctxt
)
2230 tree element_type
= TREE_TYPE (get_type ());
2231 svalue
*index_sval
= model
->get_svalue (index_sid
);
2232 if (tree cst_index
= index_sval
->maybe_get_constant ())
2234 key_t key
= key_from_constant (cst_index
);
2235 region_id element_rid
2236 = get_or_create (model
, this_rid
, key
, element_type
, ctxt
);
2240 return model
->get_or_create_view (this_rid
, element_type
, ctxt
);
2243 /* Implementation of region::clone vfunc for array_region. */
2246 array_region::clone () const
2248 return new array_region (*this);
2251 /* Compare the fields of this array_region with OTHER, returning true
2253 For use by region::operator==. */
2256 array_region::compare_fields (const array_region
&other
) const
2258 if (m_map
.elements () != other
.m_map
.elements ())
2261 for (map_t::iterator iter
= m_map
.begin ();
2262 iter
!= m_map
.end ();
2265 int key
= (*iter
).first
;
2266 region_id e
= (*iter
).second
;
2267 region_id
*other_slot
= const_cast <map_t
&> (other
.m_map
).get (key
);
2268 if (other_slot
== NULL
)
2270 if (e
!= *other_slot
)
2276 /* Implementation of region::print_fields vfunc for array_region. */
2279 array_region::print_fields (const region_model
&model
,
2281 pretty_printer
*pp
) const
2283 region::print_fields (model
, this_rid
, pp
);
2284 pp_string (pp
, ", array: {");
2285 for (map_t::iterator iter
= m_map
.begin ();
2286 iter
!= m_map
.end ();
2289 if (iter
!= m_map
.begin ())
2290 pp_string (pp
, ", ");
2291 int key
= (*iter
).first
;
2292 region_id child_rid
= (*iter
).second
;
2293 pp_printf (pp
, "[%i]: ", key
);
2294 child_rid
.print (pp
);
2296 pp_string (pp
, "}");
2299 /* Implementation of region::validate vfunc for array_region. */
2302 array_region::validate (const region_model
&model
) const
2304 region::validate (model
);
2305 for (map_t::iterator iter
= m_map
.begin ();
2306 iter
!= m_map
.end ();
2309 region_id child_rid
= (*iter
).second
;
2310 child_rid
.validate (model
);
2314 /* Implementation of region::dump_dot_to_pp vfunc for array_region. */
2317 array_region::dump_dot_to_pp (const region_model
&model
,
2319 pretty_printer
*pp
) const
2321 region::dump_dot_to_pp (model
, this_rid
, pp
);
2322 for (map_t::iterator iter
= m_map
.begin ();
2323 iter
!= m_map
.end ();
2326 // TODO: add nodes/edges to label things
2328 int key
= (*iter
).first
;
2329 region_id child_rid
= (*iter
).second
;
2331 pp_printf (pp
, "rid_label_%i [label=\"", child_rid
.as_int ());
2332 pp_write_text_to_stream (pp
);
2333 pp_printf (pp
, "%qi", key
);
2334 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
2335 pp_string (pp
, "\"];");
2338 pp_printf (pp
, "rid_label_%i", child_rid
.as_int ());
2339 pp_string (pp
, " -> ");
2340 child_rid
.dump_node_name_to_pp (pp
);
2341 pp_string (pp
, ";");
2346 /* Implementation of region::dump_child_label vfunc for array_region. */
2349 array_region::dump_child_label (const region_model
&model
,
2351 region_id child_rid
,
2352 pretty_printer
*pp
) const
2354 region::dump_child_label (model
, this_rid
, child_rid
, pp
);
2356 for (map_t::iterator iter
= m_map
.begin ();
2357 iter
!= m_map
.end ();
2360 if (child_rid
== (*iter
).second
)
2362 int key
= (*iter
).first
;
2363 pp_printf (pp
, "[%i]: ", key
);
2368 /* Look for a child region for KEY within this array_region.
2369 If it doesn't already exist, create a child array_region, using TYPE for
2371 Return the region_id of the child (whether pre-existing, or
2373 Notify CTXT if we don't know how to handle TYPE. */
2376 array_region::get_or_create (region_model
*model
,
2380 region_model_context
*ctxt
)
2382 region_id
*slot
= m_map
.get (key
);
2385 region_id child_rid
= model
->add_region_for_type (this_rid
, type
, ctxt
);
2386 m_map
.put (key
, child_rid
);
2390 /* Get the region_id for the child region for KEY within this
2391 ARRAY_REGION, or NULL if there is no such child region. */
2394 array_region::get (key_t key
)
2396 region_id
*slot
= m_map
.get (key
);
2400 /* Implementation of region::add_to_hash vfunc for array_region. */
2403 array_region::add_to_hash (inchash::hash
&hstate
) const
2405 region::add_to_hash (hstate
);
2409 /* Implementation of region::remap_region_ids vfunc for array_region. */
2412 array_region::remap_region_ids (const region_id_map
&map
)
2414 region::remap_region_ids (map
);
2416 /* Remap the region ids within the map entries. */
2417 for (map_t::iterator iter
= m_map
.begin ();
2418 iter
!= m_map
.end (); ++iter
)
2419 map
.update (&(*iter
).second
);
2422 /* Look for a child region with id CHILD_RID within this array_region.
2423 If one is found, write its key to *OUT and return true,
2424 otherwise return false. */
2427 array_region::get_key_for_child_region (region_id child_rid
, key_t
*out
) const
2429 // TODO: do we want to store an inverse map?
2430 for (map_t::iterator iter
= m_map
.begin ();
2431 iter
!= m_map
.end ();
2434 key_t key
= (*iter
).first
;
2435 region_id r
= (*iter
).second
;
2446 /* qsort comparator for array_region's keys. */
2449 array_region::key_cmp (const void *p1
, const void *p2
)
2451 key_t i1
= *(const key_t
*)p1
;
2452 key_t i2
= *(const key_t
*)p2
;
2462 /* Implementation of region::walk_for_canonicalization vfunc for
2466 array_region::walk_for_canonicalization (canonicalization
*c
) const
2468 auto_vec
<int> keys (m_map
.elements ());
2469 for (map_t::iterator iter
= m_map
.begin ();
2470 iter
!= m_map
.end ();
2473 int key_a
= (*iter
).first
;
2474 keys
.quick_push (key_a
);
2476 keys
.qsort (key_cmp
);
2480 FOR_EACH_VEC_ELT (keys
, i
, key
)
2482 region_id rid
= *const_cast<array_region
*>(this)->m_map
.get (key
);
2487 /* Convert constant CST into an array_region::key_t. */
2490 array_region::key_from_constant (tree cst
)
2492 gcc_assert (CONSTANT_CLASS_P (cst
));
2493 wide_int w
= wi::to_wide (cst
);
2494 key_t result
= w
.to_shwi ();
2498 /* Convert array_region::key_t KEY into a tree constant. */
2501 array_region::constant_from_key (key_t key
)
2503 tree array_type
= get_type ();
2504 tree index_type
= TYPE_DOMAIN (array_type
);
2505 return build_int_cst (index_type
, key
);
2508 /* class function_region : public map_region. */
2510 /* Compare the fields of this function_region with OTHER, returning true
2512 For use by region::operator==. */
2515 function_region::compare_fields (const function_region
&other
) const
2517 return map_region::compare_fields (other
);
2520 /* Implementation of region::clone vfunc for function_region. */
2523 function_region::clone () const
2525 return new function_region (*this);
2528 /* Implementation of map_region::valid_key_p vfunc for function_region. */
2531 function_region::valid_key_p (tree key
) const
2533 return TREE_CODE (key
) == LABEL_DECL
;
2536 /* class stack_region : public region. */
2538 /* stack_region's copy ctor. */
2540 stack_region::stack_region (const stack_region
&other
)
2542 m_frame_rids (other
.m_frame_rids
.length ())
2545 region_id
*frame_rid
;
2546 FOR_EACH_VEC_ELT (other
.m_frame_rids
, i
, frame_rid
)
2547 m_frame_rids
.quick_push (*frame_rid
);
2550 /* Compare the fields of this stack_region with OTHER, returning true
2552 For use by region::operator==. */
2555 stack_region::compare_fields (const stack_region
&other
) const
2557 if (m_frame_rids
.length () != other
.m_frame_rids
.length ())
2561 region_id
*frame_rid
;
2562 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2563 if (m_frame_rids
[i
] != other
.m_frame_rids
[i
])
2569 /* Implementation of region::clone vfunc for stack_region. */
2572 stack_region::clone () const
2574 return new stack_region (*this);
2577 /* Implementation of region::print_fields vfunc for stack_region. */
2580 stack_region::print_fields (const region_model
&model
,
2582 pretty_printer
*pp
) const
2584 region::print_fields (model
, this_rid
, pp
);
2588 /* Implementation of region::dump_child_label vfunc for stack_region. */
2591 stack_region::dump_child_label (const region_model
&model
,
2592 region_id this_rid ATTRIBUTE_UNUSED
,
2593 region_id child_rid
,
2594 pretty_printer
*pp
) const
2596 function
*fun
= model
.get_region
<frame_region
> (child_rid
)->get_function ();
2597 pp_printf (pp
, "frame for %qs: ", function_name (fun
));
2600 /* Implementation of region::validate vfunc for stack_region. */
2603 stack_region::validate (const region_model
&model
) const
2605 region::validate (model
);
2607 region_id
*frame_rid
;
2608 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2609 m_frame_rids
[i
].validate (model
);
2612 /* Push FRAME_RID (for a frame_region) onto this stack. */
2615 stack_region::push_frame (region_id frame_rid
)
2617 m_frame_rids
.safe_push (frame_rid
);
2620 /* Get the region_id of the top-most frame in this stack, if any. */
2623 stack_region::get_current_frame_id () const
2625 if (m_frame_rids
.length () > 0)
2626 return m_frame_rids
[m_frame_rids
.length () - 1];
2628 return region_id::null ();
2631 /* Pop the topmost frame_region from this stack.
2633 Purge the frame region and all its descendent regions.
2634 Convert any pointers that point into such regions into
2635 POISON_KIND_POPPED_STACK svalues.
2637 Return the ID of any return value from the frame.
2639 If PURGE, then purge all unused svalues, with the exception of any
2640 return value for the frame, which is temporarily
2641 preserved in case no regions reference it, so it can
2642 be written into a region in the caller.
2644 Accumulate stats on purged entities into STATS. */
2647 stack_region::pop_frame (region_model
*model
, bool purge
, purge_stats
*stats
,
2648 region_model_context
*ctxt
)
2650 gcc_assert (m_frame_rids
.length () > 0);
2652 region_id frame_rid
= get_current_frame_id ();
2653 frame_region
*frame
= model
->get_region
<frame_region
> (frame_rid
);
2655 /* Evaluate the result, within the callee frame. */
2656 svalue_id result_sid
;
2657 tree fndecl
= frame
->get_function ()->decl
;
2658 tree result
= DECL_RESULT (fndecl
);
2659 if (result
&& TREE_TYPE (result
) != void_type_node
)
2660 result_sid
= model
->get_rvalue (result
, ctxt
);
2662 /* Pop the frame RID. */
2663 m_frame_rids
.pop ();
2665 model
->delete_region_and_descendents (frame_rid
,
2666 POISON_KIND_POPPED_STACK
,
2668 ctxt
? ctxt
->get_logger () : NULL
);
2670 /* Delete unused svalues, but don't delete the return value. */
2672 model
->purge_unused_svalues (stats
, ctxt
, &result_sid
);
2679 /* Implementation of region::add_to_hash vfunc for stack_region. */
2682 stack_region::add_to_hash (inchash::hash
&hstate
) const
2684 region::add_to_hash (hstate
);
2687 region_id
*frame_rid
;
2688 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2689 inchash::add (*frame_rid
, hstate
);
2692 /* Implementation of region::remap_region_ids vfunc for stack_region. */
2695 stack_region::remap_region_ids (const region_id_map
&map
)
2697 region::remap_region_ids (map
);
2699 region_id
*frame_rid
;
2700 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2701 map
.update (&m_frame_rids
[i
]);
2704 /* Attempt to merge STACK_REGION_A and STACK_REGION_B using MERGER.
2705 Return true if the merger is possible, false otherwise. */
2708 stack_region::can_merge_p (const stack_region
*stack_region_a
,
2709 const stack_region
*stack_region_b
,
2710 model_merger
*merger
)
2712 if (stack_region_a
->get_num_frames ()
2713 != stack_region_b
->get_num_frames ())
2716 region_model
*merged_model
= merger
->m_merged_model
;
2718 region_id rid_merged_stack
2719 = merged_model
->get_root_region ()->ensure_stack_region (merged_model
);
2721 stack_region
*merged_stack
2722 = merged_model
->get_region
<stack_region
> (rid_merged_stack
);
2724 /* First, create all frames in the merged model, without populating them.
2725 The merging code assumes that all frames in the merged model already exist,
2726 so we have to do this first to handle the case in which a local in an
2727 older frame points at a local in a more recent frame. */
2728 for (unsigned i
= 0; i
< stack_region_a
->get_num_frames (); i
++)
2730 region_id rid_a
= stack_region_a
->get_frame_rid (i
);
2731 frame_region
*frame_a
= merger
->get_region_a
<frame_region
> (rid_a
);
2733 region_id rid_b
= stack_region_b
->get_frame_rid (i
);
2734 frame_region
*frame_b
= merger
->get_region_b
<frame_region
> (rid_b
);
2736 if (frame_a
->get_function () != frame_b
->get_function ())
2739 frame_region
*merged_frame
= new frame_region (rid_merged_stack
,
2740 frame_a
->get_function (),
2741 frame_a
->get_depth ());
2742 region_id rid_merged_frame
= merged_model
->add_region (merged_frame
);
2743 merged_stack
->push_frame (rid_merged_frame
);
2746 /* Now populate the frames we created. */
2747 for (unsigned i
= 0; i
< stack_region_a
->get_num_frames (); i
++)
2749 region_id rid_a
= stack_region_a
->get_frame_rid (i
);
2750 frame_region
*frame_a
= merger
->get_region_a
<frame_region
> (rid_a
);
2752 region_id rid_b
= stack_region_b
->get_frame_rid (i
);
2753 frame_region
*frame_b
= merger
->get_region_b
<frame_region
> (rid_b
);
2755 region_id rid_merged_frame
= merged_stack
->get_frame_rid (i
);
2756 frame_region
*merged_frame
2757 = merged_model
->get_region
<frame_region
> (rid_merged_frame
);
2758 if (!map_region::can_merge_p (frame_a
, frame_b
,
2759 merged_frame
, rid_merged_frame
,
2767 /* Implementation of region::walk_for_canonicalization vfunc for
2771 stack_region::walk_for_canonicalization (canonicalization
*c
) const
2774 region_id
*frame_rid
;
2775 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2776 c
->walk_rid (*frame_rid
);
2779 /* For debugging purposes: look for a grandchild region within one of
2780 the child frame regions, where the grandchild is for a decl named
2781 IDENTIFIER (or an SSA_NAME for such a decl):
2785 `-region for decl named IDENTIFIER
2787 returning its value, or svalue_id::null if none are found. */
2790 stack_region::get_value_by_name (tree identifier
,
2791 const region_model
&model
) const
2794 region_id
*frame_rid
;
2795 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2797 frame_region
*frame
= model
.get_region
<frame_region
> (*frame_rid
);
2798 svalue_id sid
= frame
->get_value_by_name (identifier
, model
);
2803 return svalue_id::null ();
2806 /* class heap_region : public region. */
2808 /* heap_region's copy ctor. */
2810 heap_region::heap_region (const heap_region
&other
)
2815 /* Compare the fields of this heap_region with OTHER, returning true
2817 For use by region::operator==. */
2820 heap_region::compare_fields (const heap_region
&) const
2826 /* Implementation of region::clone vfunc for heap_region. */
2829 heap_region::clone () const
2831 return new heap_region (*this);
2834 /* Implementation of region::walk_for_canonicalization vfunc for
2838 heap_region::walk_for_canonicalization (canonicalization
*) const
2843 /* class root_region : public region. */
2845 /* root_region's default ctor. */
2847 root_region::root_region ()
2848 : region (region_id::null (),
2854 /* root_region's copy ctor. */
2856 root_region::root_region (const root_region
&other
)
2858 m_stack_rid (other
.m_stack_rid
),
2859 m_globals_rid (other
.m_globals_rid
),
2860 m_code_rid (other
.m_code_rid
),
2861 m_heap_rid (other
.m_heap_rid
)
2865 /* Compare the fields of this root_region with OTHER, returning true
2867 For use by region::operator==. */
2870 root_region::compare_fields (const root_region
&other
) const
2872 if (m_stack_rid
!= other
.m_stack_rid
)
2874 if (m_globals_rid
!= other
.m_globals_rid
)
2876 if (m_code_rid
!= other
.m_code_rid
)
2878 if (m_heap_rid
!= other
.m_heap_rid
)
2883 /* Implementation of region::clone vfunc for root_region. */
2886 root_region::clone () const
2888 return new root_region (*this);
2891 /* Implementation of region::print_fields vfunc for root_region. */
2894 root_region::print_fields (const region_model
&model
,
2896 pretty_printer
*pp
) const
2898 region::print_fields (model
, this_rid
, pp
);
2902 /* Implementation of region::validate vfunc for root_region. */
2905 root_region::validate (const region_model
&model
) const
2907 region::validate (model
);
2908 m_stack_rid
.validate (model
);
2909 m_globals_rid
.validate (model
);
2910 m_code_rid
.validate (model
);
2911 m_heap_rid
.validate (model
);
2914 /* Implementation of region::dump_child_label vfunc for root_region. */
2917 root_region::dump_child_label (const region_model
&model ATTRIBUTE_UNUSED
,
2918 region_id this_rid ATTRIBUTE_UNUSED
,
2919 region_id child_rid
,
2920 pretty_printer
*pp
) const
2922 if (child_rid
== m_stack_rid
)
2923 pp_printf (pp
, "stack: ");
2924 else if (child_rid
== m_globals_rid
)
2925 pp_printf (pp
, "globals: ");
2926 else if (child_rid
== m_code_rid
)
2927 pp_printf (pp
, "code: ");
2928 else if (child_rid
== m_heap_rid
)
2929 pp_printf (pp
, "heap: ");
2932 /* Create a new frame_region for a call to FUN and push it onto
2935 If ARG_SIDS is non-NULL, use it to populate the parameters
2937 Otherwise, populate them with unknown values.
2939 Return the region_id of the new frame. */
2942 root_region::push_frame (region_model
*model
, function
*fun
,
2943 vec
<svalue_id
> *arg_sids
,
2944 region_model_context
*ctxt
)
2947 /* arg_sids can be NULL. */
2949 ensure_stack_region (model
);
2950 stack_region
*stack
= model
->get_region
<stack_region
> (m_stack_rid
);
2952 frame_region
*region
= new frame_region (m_stack_rid
, fun
,
2953 stack
->get_num_frames ());
2954 region_id frame_rid
= model
->add_region (region
);
2956 // TODO: unify these cases by building a vec of unknown?
2960 /* Arguments supplied from a caller frame. */
2962 tree fndecl
= fun
->decl
;
2964 for (tree iter_parm
= DECL_ARGUMENTS (fndecl
); iter_parm
;
2965 iter_parm
= DECL_CHAIN (iter_parm
), ++idx
)
2967 /* If there's a mismatching declaration, the call stmt might
2968 not have enough args. Handle this case by leaving the
2969 rest of the params as uninitialized. */
2970 if (idx
>= arg_sids
->length ())
2972 svalue_id arg_sid
= (*arg_sids
)[idx
];
2974 = region
->get_or_create (model
, frame_rid
, iter_parm
,
2975 TREE_TYPE (iter_parm
), ctxt
);
2976 model
->set_value (parm_rid
, arg_sid
, ctxt
);
2978 /* Also do it for default SSA name (sharing the same unknown
2980 tree parm_default_ssa
= ssa_default_def (fun
, iter_parm
);
2981 if (parm_default_ssa
)
2983 region_id defssa_rid
2984 = region
->get_or_create (model
, frame_rid
, parm_default_ssa
,
2985 TREE_TYPE (iter_parm
), ctxt
);
2986 model
->set_value (defssa_rid
, arg_sid
, ctxt
);
2992 /* No known arguments (a top-level call within the analysis). */
2994 /* Params have a defined, unknown value; they should not inherit
2995 from the poisoned uninit value. */
2996 tree fndecl
= fun
->decl
;
2997 for (tree iter_parm
= DECL_ARGUMENTS (fndecl
); iter_parm
;
2998 iter_parm
= DECL_CHAIN (iter_parm
))
3001 = region
->get_or_create (model
, frame_rid
, iter_parm
,
3002 TREE_TYPE (iter_parm
), ctxt
);
3004 = model
->set_to_new_unknown_value (parm_rid
, TREE_TYPE (iter_parm
),
3007 /* Also do it for default SSA name (sharing the same unknown
3009 tree parm_default_ssa
= ssa_default_def (fun
, iter_parm
);
3010 if (parm_default_ssa
)
3012 region_id defssa_rid
3013 = region
->get_or_create (model
, frame_rid
, parm_default_ssa
,
3014 TREE_TYPE (iter_parm
), ctxt
);
3015 model
->get_region (defssa_rid
)->set_value (*model
, defssa_rid
,
3021 stack
->push_frame (frame_rid
);
3026 /* Get the region_id of the top-most frame in this root_region's stack,
3030 root_region::get_current_frame_id (const region_model
&model
) const
3032 stack_region
*stack
= model
.get_region
<stack_region
> (m_stack_rid
);
3034 return stack
->get_current_frame_id ();
3036 return region_id::null ();
3039 /* Pop the topmost frame_region from this root_region's stack;
3040 see the comment for stack_region::pop_frame. */
3043 root_region::pop_frame (region_model
*model
, bool purge
, purge_stats
*out
,
3044 region_model_context
*ctxt
)
3046 stack_region
*stack
= model
->get_region
<stack_region
> (m_stack_rid
);
3047 return stack
->pop_frame (model
, purge
, out
, ctxt
);
3050 /* Return the region_id of the stack region, creating it if doesn't
3054 root_region::ensure_stack_region (region_model
*model
)
3056 if (m_stack_rid
.null_p ())
3058 svalue_id uninit_sid
3059 = model
->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT
,
3062 = model
->add_region (new stack_region (model
->get_root_rid (),
3068 /* Return the stack region (which could be NULL). */
3071 root_region::get_stack_region (const region_model
*model
) const
3073 return model
->get_region
<stack_region
> (m_stack_rid
);
3076 /* Return the region_id of the globals region, creating it if doesn't
3080 root_region::ensure_globals_region (region_model
*model
)
3082 if (m_globals_rid
.null_p ())
3084 = model
->add_region (new globals_region (model
->get_root_rid ()));
3085 return m_globals_rid
;
3088 /* Return the code region (which could be NULL). */
3091 root_region::get_code_region (const region_model
*model
) const
3093 return model
->get_region
<code_region
> (m_code_rid
);
3096 /* Return the region_id of the code region, creating it if doesn't
3100 root_region::ensure_code_region (region_model
*model
)
3102 if (m_code_rid
.null_p ())
3104 = model
->add_region (new code_region (model
->get_root_rid ()));
3108 /* Return the globals region (which could be NULL). */
3111 root_region::get_globals_region (const region_model
*model
) const
3113 return model
->get_region
<globals_region
> (m_globals_rid
);
3116 /* Return the region_id of the heap region, creating it if doesn't
3120 root_region::ensure_heap_region (region_model
*model
)
3122 if (m_heap_rid
.null_p ())
3124 svalue_id uninit_sid
3125 = model
->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT
,
3128 = model
->add_region (new heap_region (model
->get_root_rid (),
3134 /* Return the heap region (which could be NULL). */
3137 root_region::get_heap_region (const region_model
*model
) const
3139 return model
->get_region
<heap_region
> (m_heap_rid
);
3142 /* Implementation of region::remap_region_ids vfunc for root_region. */
3145 root_region::remap_region_ids (const region_id_map
&map
)
3147 map
.update (&m_stack_rid
);
3148 map
.update (&m_globals_rid
);
3149 map
.update (&m_code_rid
);
3150 map
.update (&m_heap_rid
);
3153 /* Attempt to merge ROOT_REGION_A and ROOT_REGION_B into
3154 MERGED_ROOT_REGION using MERGER.
3155 Return true if the merger is possible, false otherwise. */
3158 root_region::can_merge_p (const root_region
*root_region_a
,
3159 const root_region
*root_region_b
,
3160 root_region
*merged_root_region
,
3161 model_merger
*merger
)
3163 /* We can only merge if the stacks are sufficiently similar. */
3164 stack_region
*stack_a
= root_region_a
->get_stack_region (merger
->m_model_a
);
3165 stack_region
*stack_b
= root_region_b
->get_stack_region (merger
->m_model_b
);
3166 if (stack_a
&& stack_b
)
3168 /* If the two models both have a stack, attempt to merge them. */
3169 merged_root_region
->ensure_stack_region (merger
->m_merged_model
);
3170 if (!stack_region::can_merge_p (stack_a
, stack_b
, merger
))
3173 else if (stack_a
|| stack_b
)
3174 /* Don't attempt to merge if one model has a stack and the other
3178 map_region
*globals_a
= root_region_a
->get_globals_region (merger
->m_model_a
);
3179 map_region
*globals_b
= root_region_b
->get_globals_region (merger
->m_model_b
);
3180 if (globals_a
&& globals_b
)
3182 /* If both models have globals regions, attempt to merge them. */
3183 region_id merged_globals_rid
3184 = merged_root_region
->ensure_globals_region (merger
->m_merged_model
);
3185 map_region
*merged_globals
3186 = merged_root_region
->get_globals_region (merger
->m_merged_model
);
3187 if (!map_region::can_merge_p (globals_a
, globals_b
,
3188 merged_globals
, merged_globals_rid
,
3192 /* otherwise, merge as "no globals". */
3194 map_region
*code_a
= root_region_a
->get_code_region (merger
->m_model_a
);
3195 map_region
*code_b
= root_region_b
->get_code_region (merger
->m_model_b
);
3196 if (code_a
&& code_b
)
3198 /* If both models have code regions, attempt to merge them. */
3199 region_id merged_code_rid
3200 = merged_root_region
->ensure_code_region (merger
->m_merged_model
);
3201 map_region
*merged_code
3202 = merged_root_region
->get_code_region (merger
->m_merged_model
);
3203 if (!map_region::can_merge_p (code_a
, code_b
,
3204 merged_code
, merged_code_rid
,
3208 /* otherwise, merge as "no code". */
3210 heap_region
*heap_a
= root_region_a
->get_heap_region (merger
->m_model_a
);
3211 heap_region
*heap_b
= root_region_b
->get_heap_region (merger
->m_model_b
);
3212 if (heap_a
&& heap_b
)
3214 /* If both have a heap, create a "merged" heap.
3215 Actually merging the heap contents happens via the region_svalue
3216 instances, as needed, when seeing pairs of region_svalue instances. */
3217 merged_root_region
->ensure_heap_region (merger
->m_merged_model
);
3219 /* otherwise, merge as "no heap". */
3224 /* Implementation of region::add_to_hash vfunc for root_region. */
3227 root_region::add_to_hash (inchash::hash
&hstate
) const
3229 region::add_to_hash (hstate
);
3230 inchash::add (m_stack_rid
, hstate
);
3231 inchash::add (m_globals_rid
, hstate
);
3232 inchash::add (m_code_rid
, hstate
);
3233 inchash::add (m_heap_rid
, hstate
);
3236 /* Implementation of region::walk_for_canonicalization vfunc for
3240 root_region::walk_for_canonicalization (canonicalization
*c
) const
3242 c
->walk_rid (m_stack_rid
);
3243 c
->walk_rid (m_globals_rid
);
3244 c
->walk_rid (m_code_rid
);
3245 c
->walk_rid (m_heap_rid
);
3248 /* For debugging purposes: look for a descendant region for a local
3249 or global decl named IDENTIFIER (or an SSA_NAME for such a decl),
3250 returning its value, or svalue_id::null if none are found. */
3253 root_region::get_value_by_name (tree identifier
,
3254 const region_model
&model
) const
3256 if (stack_region
*stack
= get_stack_region (&model
))
3258 svalue_id sid
= stack
->get_value_by_name (identifier
, model
);
3262 if (map_region
*globals
= get_globals_region (&model
))
3264 svalue_id sid
= globals
->get_value_by_name (identifier
, model
);
3268 return svalue_id::null ();
3271 /* class symbolic_region : public map_region. */
3273 /* symbolic_region's copy ctor. */
3275 symbolic_region::symbolic_region (const symbolic_region
&other
)
3277 m_possibly_null (other
.m_possibly_null
)
3281 /* Compare the fields of this symbolic_region with OTHER, returning true
3283 For use by region::operator==. */
3286 symbolic_region::compare_fields (const symbolic_region
&other
) const
3288 return m_possibly_null
== other
.m_possibly_null
;
3291 /* Implementation of region::clone vfunc for symbolic_region. */
3294 symbolic_region::clone () const
3296 return new symbolic_region (*this);
3299 /* Implementation of region::walk_for_canonicalization vfunc for
3303 symbolic_region::walk_for_canonicalization (canonicalization
*) const
3308 /* class region_model. */
3310 /* region_model's default ctor. */
3312 region_model::region_model ()
3314 m_root_rid
= add_region (new root_region ());
3315 m_constraints
= new impl_constraint_manager (this);
3319 /* region_model's copy ctor. */
3321 region_model::region_model (const region_model
&other
)
3322 : m_svalues (other
.m_svalues
.length ()),
3323 m_regions (other
.m_regions
.length ()),
3324 m_root_rid (other
.m_root_rid
)
3326 /* Clone the svalues and regions. */
3330 FOR_EACH_VEC_ELT (other
.m_svalues
, i
, svalue
)
3331 m_svalues
.quick_push (svalue
->clone ());
3334 FOR_EACH_VEC_ELT (other
.m_regions
, i
, region
)
3335 m_regions
.quick_push (region
->clone ());
3337 m_constraints
= other
.m_constraints
->clone (this);
3340 /* region_model's dtor. */
3342 region_model::~region_model ()
3344 delete m_constraints
;
3347 /* region_model's assignment operator. */
3350 region_model::operator= (const region_model
&other
)
3356 /* Delete existing content. */
3357 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3359 m_svalues
.truncate (0);
3361 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3363 m_regions
.truncate (0);
3365 delete m_constraints
;
3367 /* Clone the svalues and regions. */
3368 m_svalues
.reserve (other
.m_svalues
.length (), true);
3369 FOR_EACH_VEC_ELT (other
.m_svalues
, i
, svalue
)
3370 m_svalues
.quick_push (svalue
->clone ());
3372 m_regions
.reserve (other
.m_regions
.length (), true);
3373 FOR_EACH_VEC_ELT (other
.m_regions
, i
, region
)
3374 m_regions
.quick_push (region
->clone ());
3376 m_root_rid
= other
.m_root_rid
;
3378 m_constraints
= other
.m_constraints
->clone (this);
3383 /* Equality operator for region_model.
3385 Amongst other things this directly compares the svalue and region
3386 vectors and so for this to be meaningful both this and OTHER should
3387 have been canonicalized. */
3390 region_model::operator== (const region_model
&other
) const
3392 if (m_root_rid
!= other
.m_root_rid
)
3395 if (m_svalues
.length () != other
.m_svalues
.length ())
3398 if (m_regions
.length () != other
.m_regions
.length ())
3401 if (*m_constraints
!= *other
.m_constraints
)
3406 FOR_EACH_VEC_ELT (other
.m_svalues
, i
, svalue
)
3407 if (!(*m_svalues
[i
] == *other
.m_svalues
[i
]))
3411 FOR_EACH_VEC_ELT (other
.m_regions
, i
, region
)
3412 if (!(*m_regions
[i
] == *other
.m_regions
[i
]))
3415 gcc_checking_assert (hash () == other
.hash ());
3420 /* Generate a hash value for this region_model. */
3423 region_model::hash () const
3425 hashval_t result
= 0;
3429 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3430 result
^= svalue
->hash ();
3433 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3434 result
^= region
->hash ();
3436 result
^= m_constraints
->hash ();
3441 /* Print an all-on-one-line representation of this region_model to PP,
3442 which must support %E for trees. */
3445 region_model::print (pretty_printer
*pp
) const
3449 pp_string (pp
, "svalues: [");
3451 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3454 pp_string (pp
, ", ");
3455 print_svalue (svalue_id::from_int (i
), pp
);
3458 pp_string (pp
, "], regions: [");
3461 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3464 pp_string (pp
, ", ");
3465 region
->print (*this, region_id::from_int (i
), pp
);
3468 pp_string (pp
, "], constraints: ");
3470 m_constraints
->print (pp
);
3473 /* Print the svalue with id SID to PP. */
3476 region_model::print_svalue (svalue_id sid
, pretty_printer
*pp
) const
3478 get_svalue (sid
)->print (*this, sid
, pp
);
3481 /* Dump a .dot representation of this region_model to PP, showing
3482 the values and the hierarchy of regions. */
3485 region_model::dump_dot_to_pp (pretty_printer
*pp
) const
3487 graphviz_out
gv (pp
);
3489 pp_string (pp
, "digraph \"");
3490 pp_write_text_to_stream (pp
);
3491 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
3492 pp_string (pp
, "\" {\n");
3496 pp_string (pp
, "overlap=false;\n");
3497 pp_string (pp
, "compound=true;\n");
3502 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3503 svalue
->dump_dot_to_pp (*this, svalue_id::from_int (i
), pp
);
3506 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3507 region
->dump_dot_to_pp (*this, region_id::from_int (i
), pp
);
3509 /* TODO: constraints. */
3511 /* Terminate "digraph" */
3513 pp_string (pp
, "}");
3517 /* Dump a .dot representation of this region_model to FP. */
3520 region_model::dump_dot_to_file (FILE *fp
) const
3523 pp_format_decoder (&pp
) = default_tree_printer
;
3524 pp
.buffer
->stream
= fp
;
3525 dump_dot_to_pp (&pp
);
3529 /* Dump a .dot representation of this region_model to PATH. */
3532 region_model::dump_dot (const char *path
) const
3534 FILE *fp
= fopen (path
, "w");
3535 dump_dot_to_file (fp
);
3539 /* Dump a multiline representation of this model to PP, showing the
3540 region hierarchy, the svalues, and any constraints.
3542 If SUMMARIZE is true, show only the most pertinent information,
3543 in a form that attempts to be less verbose.
3544 Otherwise, show all information. */
3547 region_model::dump_to_pp (pretty_printer
*pp
, bool summarize
) const
3551 auto_vec
<path_var
> rep_path_vars
;
3555 FOR_EACH_VEC_ELT (m_regions
, i
, reg
)
3557 region_id rid
= region_id::from_int (i
);
3558 path_var pv
= get_representative_path_var (rid
);
3560 rep_path_vars
.safe_push (pv
);
3562 bool is_first
= true;
3564 /* Work with a copy in case the get_lvalue calls change anything
3565 (they shouldn't). */
3566 region_model
copy (*this);
3567 copy
.dump_summary_of_rep_path_vars (pp
, &rep_path_vars
, &is_first
);
3570 FOR_EACH_VEC_ELT (m_constraints
->m_equiv_classes
, i
, ec
)
3572 for (unsigned j
= 0; j
< ec
->m_vars
.length (); j
++)
3574 svalue_id lhs_sid
= ec
->m_vars
[j
];
3575 tree lhs_tree
= get_representative_tree (lhs_sid
);
3576 if (lhs_tree
== NULL_TREE
)
3578 for (unsigned k
= j
+ 1; k
< ec
->m_vars
.length (); k
++)
3580 svalue_id rhs_sid
= ec
->m_vars
[k
];
3581 tree rhs_tree
= get_representative_tree (rhs_sid
);
3583 && !(CONSTANT_CLASS_P (lhs_tree
)
3584 && CONSTANT_CLASS_P (rhs_tree
)))
3586 dump_separator (pp
, &is_first
);
3587 dump_tree (pp
, lhs_tree
);
3588 pp_string (pp
, " == ");
3589 dump_tree (pp
, rhs_tree
);
3596 FOR_EACH_VEC_ELT (m_constraints
->m_constraints
, i
, c
)
3598 const equiv_class
&lhs
= c
->m_lhs
.get_obj (*m_constraints
);
3599 const equiv_class
&rhs
= c
->m_rhs
.get_obj (*m_constraints
);
3600 svalue_id lhs_sid
= lhs
.get_representative ();
3601 svalue_id rhs_sid
= rhs
.get_representative ();
3602 tree lhs_tree
= get_representative_tree (lhs_sid
);
3603 tree rhs_tree
= get_representative_tree (rhs_sid
);
3604 if (lhs_tree
&& rhs_tree
3605 && !(CONSTANT_CLASS_P (lhs_tree
) && CONSTANT_CLASS_P (rhs_tree
)))
3607 dump_separator (pp
, &is_first
);
3608 dump_tree (pp
, lhs_tree
);
3609 pp_printf (pp
, " %s ", constraint_op_code (c
->m_op
));
3610 dump_tree (pp
, rhs_tree
);
3617 get_region (m_root_rid
)->dump_to_pp (*this, m_root_rid
, pp
, "", true);
3619 pp_string (pp
, "svalues:");
3623 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3625 pp_string (pp
, " ");
3626 svalue_id sid
= svalue_id::from_int (i
);
3627 print_svalue (sid
, pp
);
3631 pp_string (pp
, "constraint manager:");
3633 m_constraints
->dump_to_pp (pp
);
3636 /* Dump a multiline representation of this model to FILE. */
3639 region_model::dump (FILE *fp
, bool summarize
) const
3642 pp_format_decoder (&pp
) = default_tree_printer
;
3643 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
3644 pp
.buffer
->stream
= fp
;
3645 dump_to_pp (&pp
, summarize
);
3649 /* Dump a multiline representation of this model to stderr. */
3652 region_model::dump (bool summarize
) const
3654 dump (stderr
, summarize
);
3657 /* Dump RMODEL fully to stderr (i.e. without summarization). */
3660 region_model::debug () const
3665 /* Dump VEC to PP, in the form "{VEC elements}: LABEL". */
3668 dump_vec_of_tree (pretty_printer
*pp
,
3670 const auto_vec
<tree
> &vec
,
3673 if (vec
.length () == 0)
3676 dump_separator (pp
, is_first
);
3677 pp_printf (pp
, "{");
3680 FOR_EACH_VEC_ELT (vec
, i
, key
)
3683 pp_string (pp
, ", ");
3684 dump_tree (pp
, key
);
3686 pp_printf (pp
, "}: %s", label
);
3689 /* Dump all *REP_PATH_VARS to PP in compact form, updating *IS_FIRST.
3690 Subroutine of region_model::dump_to_pp. */
3693 region_model::dump_summary_of_rep_path_vars (pretty_printer
*pp
,
3694 auto_vec
<path_var
> *rep_path_vars
,
3697 /* Print pointers, constants, and poisoned values that aren't "uninit";
3698 gather keys for unknown and uninit values. */
3701 auto_vec
<tree
> unknown_trees
;
3702 auto_vec
<tree
> uninit_trees
;
3703 FOR_EACH_VEC_ELT (*rep_path_vars
, i
, pv
)
3705 if (TREE_CODE (pv
->m_tree
) == STRING_CST
)
3707 tentative_region_model_context ctxt
;
3708 region_id child_rid
= get_lvalue (*pv
, &ctxt
);
3709 if (ctxt
.had_errors_p ())
3711 region
*child_region
= get_region (child_rid
);
3714 svalue_id sid
= child_region
->get_value_direct ();
3717 svalue
*sval
= get_svalue (sid
);
3718 switch (sval
->get_kind ())
3724 region_svalue
*region_sval
= as_a
<region_svalue
*> (sval
);
3725 region_id pointee_rid
= region_sval
->get_pointee ();
3726 gcc_assert (!pointee_rid
.null_p ());
3727 tree pointee
= get_representative_path_var (pointee_rid
).m_tree
;
3728 dump_separator (pp
, is_first
);
3729 dump_tree (pp
, pv
->m_tree
);
3730 pp_string (pp
, ": ");
3731 pp_character (pp
, '&');
3733 dump_tree (pp
, pointee
);
3735 pointee_rid
.print (pp
);
3739 dump_separator (pp
, is_first
);
3740 dump_tree (pp
, pv
->m_tree
);
3741 pp_string (pp
, ": ");
3742 dump_tree (pp
, sval
->dyn_cast_constant_svalue ()->get_constant ());
3745 unknown_trees
.safe_push (pv
->m_tree
);
3749 poisoned_svalue
*poisoned_sval
= as_a
<poisoned_svalue
*> (sval
);
3750 enum poison_kind pkind
= poisoned_sval
->get_poison_kind ();
3751 if (pkind
== POISON_KIND_UNINIT
)
3752 uninit_trees
.safe_push (pv
->m_tree
);
3755 dump_separator (pp
, is_first
);
3756 dump_tree (pp
, pv
->m_tree
);
3757 pp_printf (pp
, ": %s", poison_kind_to_str (pkind
));
3762 dump_separator (pp
, is_first
);
3763 pp_printf (pp
, "setjmp: EN: %i",
3764 sval
->dyn_cast_setjmp_svalue ()->get_enode_index ());
3769 /* Print unknown and uninitialized values in consolidated form. */
3770 dump_vec_of_tree (pp
, is_first
, unknown_trees
, "unknown");
3771 dump_vec_of_tree (pp
, is_first
, uninit_trees
, "uninit");
3774 /* Assert that this object is valid. */
3777 region_model::validate () const
3779 /* Skip this in a release build. */
3784 m_constraints
->validate ();
3788 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
3789 r
->validate (*this);
3791 // TODO: anything else?
3793 /* Verify that the stack region (if any) has an "uninitialized" value. */
3794 region
*stack_region
= get_root_region ()->get_stack_region (this);
3797 svalue_id stack_value_sid
= stack_region
->get_value_direct ();
3798 svalue
*stack_value
= get_svalue (stack_value_sid
);
3799 gcc_assert (stack_value
->get_kind () == SK_POISONED
);
3800 poisoned_svalue
*subclass
= stack_value
->dyn_cast_poisoned_svalue ();
3801 gcc_assert (subclass
);
3802 gcc_assert (subclass
->get_poison_kind () == POISON_KIND_UNINIT
);
3806 /* Global data for use by svalue_id_cmp_by_constant_svalue. */
3808 static region_model
*svalue_id_cmp_by_constant_svalue_model
= NULL
;
3810 /* Comparator for use by region_model::canonicalize. */
3813 svalue_id_cmp_by_constant_svalue (const void *p1
, const void *p2
)
3815 const svalue_id
*sid1
= (const svalue_id
*)p1
;
3816 const svalue_id
*sid2
= (const svalue_id
*)p2
;
3817 gcc_assert (!sid1
->null_p ());
3818 gcc_assert (!sid2
->null_p ());
3819 gcc_assert (svalue_id_cmp_by_constant_svalue_model
);
3821 = *svalue_id_cmp_by_constant_svalue_model
->get_svalue (*sid1
);
3823 = *svalue_id_cmp_by_constant_svalue_model
->get_svalue (*sid2
);
3824 gcc_assert (sval1
.get_kind () == SK_CONSTANT
);
3825 gcc_assert (sval2
.get_kind () == SK_CONSTANT
);
3827 tree cst1
= ((const constant_svalue
&)sval1
).get_constant ();
3828 tree cst2
= ((const constant_svalue
&)sval2
).get_constant ();
3829 return tree_cmp (cst1
, cst2
);
3832 /* Reorder the regions and svalues into a deterministic "canonical" order,
3833 to maximize the chance of equality.
3834 If non-NULL, notify CTXT about the svalue id remapping. */
3837 region_model::canonicalize (region_model_context
*ctxt
)
3839 /* Walk all regions and values in a deterministic order, visiting
3840 rids and sids, generating a rid and sid map. */
3841 canonicalization
c (*this);
3843 /* (1): Walk all svalues, putting constants first, sorting the constants
3844 (thus imposing an ordering on any constants that are purely referenced
3846 Ignore other svalues for now. */
3849 auto_vec
<svalue_id
> sids
;
3851 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
3853 if (sval
->get_kind () == SK_CONSTANT
)
3854 sids
.safe_push (svalue_id::from_int (i
));
3856 svalue_id_cmp_by_constant_svalue_model
= this;
3857 sids
.qsort (svalue_id_cmp_by_constant_svalue
);
3858 svalue_id_cmp_by_constant_svalue_model
= NULL
;
3860 FOR_EACH_VEC_ELT (sids
, i
, sid
)
3864 /* (2): Walk all regions (and thus their values) in a deterministic
3866 c
.walk_rid (m_root_rid
);
3868 /* (3): Ensure we've visited everything, as we don't want to purge
3869 at this stage. Anything we visit for the first time here has
3874 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3875 c
.walk_rid (region_id::from_int (i
));
3877 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
3878 c
.walk_sid (svalue_id::from_int (i
));
3881 /* (4): We now have a reordering of the regions and values.
3883 remap_svalue_ids (c
.m_sid_map
);
3884 remap_region_ids (c
.m_rid_map
);
3886 ctxt
->remap_svalue_ids (c
.m_sid_map
);
3888 /* (5): Canonicalize the constraint_manager (it has already had its
3889 svalue_ids remapped above). This makes use of the new svalue_id
3890 values, and so must happen last. */
3891 m_constraints
->canonicalize (get_num_svalues ());
3896 /* Return true if this region_model is in canonical form. */
3899 region_model::canonicalized_p () const
3901 region_model
copy (*this);
3902 copy
.canonicalize (NULL
);
3903 return *this == copy
;
3906 /* A subclass of pending_diagnostic for complaining about uses of
3909 class poisoned_value_diagnostic
3910 : public pending_diagnostic_subclass
<poisoned_value_diagnostic
>
3913 poisoned_value_diagnostic (tree expr
, enum poison_kind pkind
)
3914 : m_expr (expr
), m_pkind (pkind
)
3917 const char *get_kind () const FINAL OVERRIDE
{ return "poisoned_value_diagnostic"; }
3919 bool operator== (const poisoned_value_diagnostic
&other
) const
3921 return m_expr
== other
.m_expr
;
3924 bool emit (rich_location
*rich_loc
) FINAL OVERRIDE
3930 case POISON_KIND_UNINIT
:
3932 diagnostic_metadata m
;
3933 m
.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
3934 return warning_meta (rich_loc
, m
,
3935 OPT_Wanalyzer_use_of_uninitialized_value
,
3936 "use of uninitialized value %qE",
3940 case POISON_KIND_FREED
:
3942 diagnostic_metadata m
;
3943 m
.add_cwe (416); /* "CWE-416: Use After Free". */
3944 return warning_meta (rich_loc
, m
,
3945 OPT_Wanalyzer_use_after_free
,
3946 "use after %<free%> of %qE",
3950 case POISON_KIND_POPPED_STACK
:
3952 /* TODO: which CWE? */
3953 return warning_at (rich_loc
,
3954 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame
,
3955 "use of pointer %qE within stale stack frame",
3962 label_text
describe_final_event (const evdesc::final_event
&ev
) FINAL OVERRIDE
3968 case POISON_KIND_UNINIT
:
3969 return ev
.formatted_print ("use of uninitialized value %qE here",
3971 case POISON_KIND_FREED
:
3972 return ev
.formatted_print ("use after %<free%> of %qE here",
3974 case POISON_KIND_POPPED_STACK
:
3975 return ev
.formatted_print
3976 ("use of pointer %qE within stale stack frame here",
3983 enum poison_kind m_pkind
;
3986 /* Determine if EXPR is poisoned, and if so, queue a diagnostic to CTXT. */
3989 region_model::check_for_poison (tree expr
, region_model_context
*ctxt
)
3994 // TODO: this is disabled for now (too many false positives)
3997 svalue_id expr_sid
= get_rvalue (expr
, ctxt
);
3998 gcc_assert (!expr_sid
.null_p ());
3999 svalue
*expr_svalue
= get_svalue (expr_sid
);
4000 gcc_assert (expr_svalue
);
4001 if (const poisoned_svalue
*poisoned_sval
4002 = expr_svalue
->dyn_cast_poisoned_svalue ())
4004 enum poison_kind pkind
= poisoned_sval
->get_poison_kind ();
4005 ctxt
->warn (new poisoned_value_diagnostic (expr
, pkind
));
4009 /* Update this model for the ASSIGN stmt, using CTXT to report any
4013 region_model::on_assignment (const gassign
*assign
, region_model_context
*ctxt
)
4015 tree lhs
= gimple_assign_lhs (assign
);
4016 tree rhs1
= gimple_assign_rhs1 (assign
);
4018 region_id lhs_rid
= get_lvalue (lhs
, ctxt
);
4020 /* Check for uses of poisoned values. */
4021 switch (get_gimple_rhs_class (gimple_expr_code (assign
)))
4023 case GIMPLE_INVALID_RHS
:
4026 case GIMPLE_TERNARY_RHS
:
4027 check_for_poison (gimple_assign_rhs3 (assign
), ctxt
);
4029 case GIMPLE_BINARY_RHS
:
4030 check_for_poison (gimple_assign_rhs2 (assign
), ctxt
);
4032 case GIMPLE_UNARY_RHS
:
4033 case GIMPLE_SINGLE_RHS
:
4034 check_for_poison (gimple_assign_rhs1 (assign
), ctxt
);
4037 if (lhs_rid
.null_p ())
4039 // TODO: issue a warning for this case
4041 enum tree_code op
= gimple_assign_rhs_code (assign
);
4047 sorry_at (assign
->location
, "unhandled assignment op: %qs",
4048 get_tree_code_name (op
));
4049 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4061 /* e.g. "x ={v} {CLOBBER};" */
4066 case POINTER_PLUS_EXPR
:
4068 /* e.g. "_1 = a_10(D) + 12;" */
4070 tree offset
= gimple_assign_rhs2 (assign
);
4072 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
4073 svalue_id offset_sid
= get_rvalue (offset
, ctxt
);
4074 region_id element_rid
4075 = get_or_create_pointer_plus_expr (TREE_TYPE (TREE_TYPE (ptr
)),
4076 ptr_sid
, offset_sid
,
4078 svalue_id element_ptr_sid
4079 = get_or_create_ptr_svalue (TREE_TYPE (ptr
), element_rid
);
4080 set_value (lhs_rid
, element_ptr_sid
, ctxt
);
4084 case POINTER_DIFF_EXPR
:
4086 /* e.g. "_1 = p_2(D) - q_3(D);". */
4090 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4097 svalue_id ptr_sid
= get_rvalue (rhs1
, ctxt
);
4098 set_value (lhs_rid
, ptr_sid
, ctxt
);
4104 region_id rhs_rid
= get_lvalue (rhs1
, ctxt
);
4106 = get_region (rhs_rid
)->get_value (*this, true, ctxt
);
4107 set_value (lhs_rid
, rhs_sid
, ctxt
);
4116 svalue_id cst_sid
= get_rvalue (rhs1
, ctxt
);
4117 set_value (lhs_rid
, cst_sid
, ctxt
);
4121 case FIX_TRUNC_EXPR
:
4125 // fall though for now
4131 svalue_id var_sid
= get_rvalue (rhs1
, ctxt
);
4132 set_value (lhs_rid
, var_sid
, ctxt
);
4143 tree rhs2
= gimple_assign_rhs2 (assign
);
4145 // TODO: constraints between svalues
4146 svalue_id rhs1_sid
= get_rvalue (rhs1
, ctxt
);
4147 svalue_id rhs2_sid
= get_rvalue (rhs2
, ctxt
);
4149 tristate t
= eval_condition (rhs1_sid
, op
, rhs2_sid
);
4152 get_rvalue (t
.is_true ()
4154 : boolean_false_node
,
4158 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4169 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4176 case TRUNC_DIV_EXPR
:
4177 case TRUNC_MOD_EXPR
:
4187 tree rhs2
= gimple_assign_rhs2 (assign
);
4189 svalue_id rhs1_sid
= get_rvalue (rhs1
, ctxt
);
4190 svalue_id rhs2_sid
= get_rvalue (rhs2
, ctxt
);
4192 if (tree rhs1_cst
= maybe_get_constant (rhs1_sid
))
4193 if (tree rhs2_cst
= maybe_get_constant (rhs2_sid
))
4195 tree result
= fold_binary (op
, TREE_TYPE (lhs
),
4196 rhs1_cst
, rhs2_cst
);
4197 if (result
&& CONSTANT_CLASS_P (result
))
4199 svalue_id result_sid
4200 = get_or_create_constant_svalue (result
);
4201 set_value (lhs_rid
, result_sid
, ctxt
);
4205 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4211 /* LHS = op0.op1; */
4212 region_id child_rid
= get_lvalue (rhs1
, ctxt
);
4214 = get_region (child_rid
)->get_value (*this, true, ctxt
);
4215 set_value (lhs_rid
, child_sid
, ctxt
);
4221 /* Update this model for the CALL stmt, using CTXT to report any
4222 diagnostics - the first half.
4224 Updates to the region_model that should be made *before* sm-states
4225 are updated are done here; other updates to the region_model are done
4226 in region_model::on_call_post.
4228 Return true if the function call has unknown side effects (it wasn't
4229 recognized and we don't have a body for it, or are unable to tell which
4233 region_model::on_call_pre (const gcall
*call
, region_model_context
*ctxt
)
4236 tree lhs_type
= NULL_TREE
;
4237 if (tree lhs
= gimple_call_lhs (call
))
4239 lhs_rid
= get_lvalue (lhs
, ctxt
);
4240 lhs_type
= TREE_TYPE (lhs
);
4243 /* Check for uses of poisoned values.
4244 For now, special-case "free", to avoid warning about "use-after-free"
4245 when "double free" would be more precise. */
4246 if (!is_special_named_call_p (call
, "free", 1))
4247 for (unsigned i
= 0; i
< gimple_call_num_args (call
); i
++)
4248 check_for_poison (gimple_call_arg (call
, i
), ctxt
);
4250 bool unknown_side_effects
= false;
4252 if (tree callee_fndecl
= get_fndecl_for_call (call
, ctxt
))
4254 if (is_named_call_p (callee_fndecl
, "malloc", call
, 1))
4256 // TODO: capture size as a svalue?
4257 region_id new_rid
= add_new_malloc_region ();
4258 if (!lhs_rid
.null_p ())
4261 = get_or_create_ptr_svalue (lhs_type
, new_rid
);
4262 set_value (lhs_rid
, ptr_sid
, ctxt
);
4266 else if (is_named_call_p (callee_fndecl
, "__builtin_alloca", call
, 1))
4268 region_id frame_rid
= get_current_frame_id ();
4270 = add_region (new symbolic_region (frame_rid
, NULL_TREE
, false));
4271 if (!lhs_rid
.null_p ())
4274 = get_or_create_ptr_svalue (lhs_type
, new_rid
);
4275 set_value (lhs_rid
, ptr_sid
, ctxt
);
4279 else if (gimple_call_builtin_p (call
, BUILT_IN_EXPECT
)
4280 || gimple_call_builtin_p (call
, BUILT_IN_EXPECT_WITH_PROBABILITY
)
4281 || gimple_call_internal_p (call
, IFN_BUILTIN_EXPECT
))
4283 /* __builtin_expect's return value is its initial argument. */
4284 if (!lhs_rid
.null_p ())
4286 tree initial_arg
= gimple_call_arg (call
, 0);
4287 svalue_id sid
= get_rvalue (initial_arg
, ctxt
);
4288 set_value (lhs_rid
, sid
, ctxt
);
4292 else if (is_named_call_p (callee_fndecl
, "strlen", call
, 1))
4294 region_id buf_rid
= deref_rvalue (gimple_call_arg (call
, 0), ctxt
);
4296 = get_region (buf_rid
)->get_value (*this, true, ctxt
);
4297 if (tree cst_expr
= maybe_get_constant (buf_sid
))
4299 if (TREE_CODE (cst_expr
) == STRING_CST
4300 && !lhs_rid
.null_p ())
4302 /* TREE_STRING_LENGTH is sizeof, not strlen. */
4303 int sizeof_cst
= TREE_STRING_LENGTH (cst_expr
);
4304 int strlen_cst
= sizeof_cst
- 1;
4305 tree t_cst
= build_int_cst (lhs_type
, strlen_cst
);
4306 svalue_id result_sid
4307 = get_or_create_constant_svalue (t_cst
);
4308 set_value (lhs_rid
, result_sid
, ctxt
);
4312 /* Otherwise an unknown value. */
4314 else if (is_named_call_p (callee_fndecl
,
4315 "__analyzer_dump_num_heap_regions", call
, 0))
4317 /* Handle the builtin "__analyzer_dump_num_heap_regions" by emitting
4318 a warning (for use in DejaGnu tests). */
4319 int num_heap_regions
= 0;
4320 region_id heap_rid
= get_root_region ()->ensure_heap_region (this);
4323 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
4324 if (region
->get_parent () == heap_rid
)
4326 /* Use quotes to ensure the output isn't truncated. */
4327 warning_at (call
->location
, 0,
4328 "num heap regions: %qi", num_heap_regions
);
4331 else if (!fndecl_has_gimple_body_p (callee_fndecl
)
4332 && !DECL_PURE_P (callee_fndecl
))
4333 unknown_side_effects
= true;
4336 unknown_side_effects
= true;
4338 /* Unknown return value. */
4339 if (!lhs_rid
.null_p ())
4340 set_to_new_unknown_value (lhs_rid
, lhs_type
, ctxt
);
4342 return unknown_side_effects
;
4345 /* Update this model for the CALL stmt, using CTXT to report any
4346 diagnostics - the second half.
4348 Updates to the region_model that should be made *after* sm-states
4349 are updated are done here; other updates to the region_model are done
4350 in region_model::on_call_pre.
4352 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
4356 region_model::on_call_post (const gcall
*call
,
4357 bool unknown_side_effects
,
4358 region_model_context
*ctxt
)
4360 /* Update for "free" here, after sm-handling.
4362 If the ptr points to an underlying heap region, delete the region,
4363 poisoning pointers to it and regions within it.
4365 We delay this until after sm-state has been updated so that the
4366 sm-handling can transition all of the various casts of the pointer
4367 to a "freed" state *before* we delete the related region here.
4369 This has to be done here so that the sm-handling can use the fact
4370 that they point to the same region to establish that they are equal
4371 (in region_model::eval_condition_without_cm), and thus transition
4372 all pointers to the region to the "freed" state together, regardless
4374 if (tree callee_fndecl
= get_fndecl_for_call (call
, ctxt
))
4375 if (is_named_call_p (callee_fndecl
, "free", call
, 1))
4377 tree ptr
= gimple_call_arg (call
, 0);
4378 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
4379 svalue
*ptr_sval
= get_svalue (ptr_sid
);
4380 if (region_svalue
*ptr_to_region_sval
4381 = ptr_sval
->dyn_cast_region_svalue ())
4383 /* If the ptr points to an underlying heap region, delete it,
4384 poisoning pointers. */
4385 region_id pointee_rid
= ptr_to_region_sval
->get_pointee ();
4386 region_id heap_rid
= get_root_region ()->ensure_heap_region (this);
4387 if (!pointee_rid
.null_p ()
4388 && get_region (pointee_rid
)->get_parent () == heap_rid
)
4391 delete_region_and_descendents (pointee_rid
,
4393 &stats
, ctxt
->get_logger ());
4394 purge_unused_svalues (&stats
, ctxt
);
4396 // TODO: do anything with stats?
4402 if (unknown_side_effects
)
4403 handle_unrecognized_call (call
, ctxt
);
4406 /* Helper class for region_model::handle_unrecognized_call, for keeping
4407 track of all regions that are reachable, and, of those, which are
4410 class reachable_regions
4413 reachable_regions (region_model
*model
)
4414 : m_model (model
), m_reachable_rids (), m_mutable_rids ()
4417 /* Lazily mark RID as being reachable, recursively adding regions
4418 reachable from RID. */
4419 void add (region_id rid
, bool is_mutable
)
4421 gcc_assert (!rid
.null_p ());
4423 unsigned idx
= rid
.as_int ();
4424 /* Bail out if this region is already in the sets at the IS_MUTABLE
4425 level of mutability. */
4426 if (!is_mutable
&& bitmap_bit_p (m_reachable_rids
, idx
))
4428 bitmap_set_bit (m_reachable_rids
, idx
);
4432 if (bitmap_bit_p (m_mutable_rids
, idx
))
4435 bitmap_set_bit (m_mutable_rids
, idx
);
4438 /* If this region's value is a pointer, add the pointee. */
4439 region
*reg
= m_model
->get_region (rid
);
4440 svalue_id sid
= reg
->get_value_direct ();
4441 svalue
*sval
= m_model
->get_svalue (sid
);
4443 if (region_svalue
*ptr
= sval
->dyn_cast_region_svalue ())
4445 region_id pointee_rid
= ptr
->get_pointee ();
4446 /* Use const-ness of pointer type to affect mutability. */
4447 bool ptr_is_mutable
= true;
4448 if (ptr
->get_type ()
4449 && TREE_CODE (ptr
->get_type ()) == POINTER_TYPE
4450 && TYPE_READONLY (TREE_TYPE (ptr
->get_type ())))
4451 ptr_is_mutable
= false;
4452 add (pointee_rid
, ptr_is_mutable
);
4455 /* Add descendents of this region. */
4456 region_id_set
descendents (m_model
);
4457 m_model
->get_descendents (rid
, &descendents
, region_id::null ());
4458 for (unsigned i
= 0; i
< m_model
->get_num_regions (); i
++)
4460 region_id iter_rid
= region_id::from_int (i
);
4461 if (descendents
.region_p (iter_rid
))
4462 add (iter_rid
, is_mutable
);
4466 bool mutable_p (region_id rid
)
4468 gcc_assert (!rid
.null_p ());
4469 return bitmap_bit_p (m_mutable_rids
, rid
.as_int ());
4473 region_model
*m_model
;
4475 /* The region ids already seen. This has to be an auto_bitmap rather than
4476 an auto_sbitmap as new regions can be created within the model during
4478 auto_bitmap m_reachable_rids
;
4480 /* The region_ids that can be changed (accessed via non-const pointers). */
4481 auto_bitmap m_mutable_rids
;
4484 /* Handle a call CALL to a function with unknown behavior.
4486 Traverse the regions in this model, determining what regions are
4487 reachable from pointer arguments to CALL and from global variables,
4490 Set all reachable regions to new unknown values and purge sm-state
4491 from their values, and from values that point to them. */
4494 region_model::handle_unrecognized_call (const gcall
*call
,
4495 region_model_context
*ctxt
)
4497 tree fndecl
= get_fndecl_for_call (call
, ctxt
);
4499 reachable_regions
reachable_regions (this);
4501 /* Determine the reachable regions and their mutability. */
4504 region_id globals_rid
= get_globals_region_id ();
4505 if (!globals_rid
.null_p ())
4506 reachable_regions
.add (globals_rid
, true);
4508 /* Params that are pointers. */
4509 tree iter_param_types
= NULL_TREE
;
4511 iter_param_types
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
4512 for (unsigned arg_idx
= 0; arg_idx
< gimple_call_num_args (call
); arg_idx
++)
4514 /* Track expected param type, where available. */
4515 tree param_type
= NULL_TREE
;
4516 if (iter_param_types
)
4518 param_type
= TREE_VALUE (iter_param_types
);
4519 gcc_assert (param_type
);
4520 iter_param_types
= TREE_CHAIN (iter_param_types
);
4523 tree parm
= gimple_call_arg (call
, arg_idx
);
4524 svalue_id parm_sid
= get_rvalue (parm
, ctxt
);
4525 svalue
*parm_sval
= get_svalue (parm_sid
);
4527 if (region_svalue
*parm_ptr
= parm_sval
->dyn_cast_region_svalue ())
4529 region_id pointee_rid
= parm_ptr
->get_pointee ();
4530 bool is_mutable
= true;
4532 && TREE_CODE (param_type
) == POINTER_TYPE
4533 && TYPE_READONLY (TREE_TYPE (param_type
)))
4535 reachable_regions
.add (pointee_rid
, is_mutable
);
4537 // FIXME: what about compound parms that contain ptrs?
4541 /* OK: we now have all reachable regions.
4542 Set them all to new unknown values. */
4543 for (unsigned i
= 0; i
< get_num_regions (); i
++)
4545 region_id iter_rid
= region_id::from_int (i
);
4546 if (reachable_regions
.mutable_p (iter_rid
))
4548 region
*reg
= get_region (iter_rid
);
4550 /* Purge any sm-state for any underlying svalue. */
4551 svalue_id curr_sid
= reg
->get_value_direct ();
4552 if (!curr_sid
.null_p ())
4553 ctxt
->on_unknown_change (curr_sid
);
4555 set_to_new_unknown_value (iter_rid
,
4561 /* Purge sm-state for any remaining svalues that point to regions that
4562 were reachable. This helps suppress leak false-positives.
4564 For example, if we had a malloc call that was cast to a "foo *" type,
4565 we could have a temporary void * for the result of malloc which has its
4566 own svalue, not reachable from the function call, but for which the
4567 "foo *" svalue was reachable. If we don't purge it, the temporary will
4568 be reported as a leak. */
4571 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
4572 if (region_svalue
*ptr
= svalue
->dyn_cast_region_svalue ())
4574 region_id pointee_rid
= ptr
->get_pointee ();
4575 if (reachable_regions
.mutable_p (pointee_rid
))
4576 ctxt
->on_unknown_change (svalue_id::from_int (i
));
4582 /* Update this model for the RETURN_STMT, using CTXT to report any
4586 region_model::on_return (const greturn
*return_stmt
, region_model_context
*ctxt
)
4588 tree callee
= get_current_function ()->decl
;
4589 tree lhs
= DECL_RESULT (callee
);
4590 tree rhs
= gimple_return_retval (return_stmt
);
4593 set_value (get_lvalue (lhs
, ctxt
), get_rvalue (rhs
, ctxt
), ctxt
);
4596 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
4597 ENODE, using CTXT to report any diagnostics.
4599 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
4600 0), as opposed to any second return due to longjmp/sigsetjmp. */
4603 region_model::on_setjmp (const gcall
*call
, const exploded_node
*enode
,
4604 region_model_context
*ctxt
)
4606 region_id buf_rid
= deref_rvalue (gimple_call_arg (call
, 0), ctxt
);
4607 region
*buf
= get_region (buf_rid
);
4609 /* Create a setjmp_svalue for this call and store it in BUF_RID's region. */
4612 setjmp_record
r (enode
, call
);
4613 svalue
*sval
= new setjmp_svalue (r
, buf
->get_type ());
4614 svalue_id new_sid
= add_svalue (sval
);
4615 set_value (buf_rid
, new_sid
, ctxt
);
4618 /* Direct calls to setjmp return 0. */
4619 if (tree lhs
= gimple_call_lhs (call
))
4621 tree zero
= build_int_cst (TREE_TYPE (lhs
), 0);
4622 svalue_id new_sid
= get_or_create_constant_svalue (zero
);
4623 region_id lhs_rid
= get_lvalue (lhs
, ctxt
);
4624 set_value (lhs_rid
, new_sid
, ctxt
);
4628 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
4629 to a "setjmp" at SETJMP_CALL where the final stack depth should be
4630 SETJMP_STACK_DEPTH. Purge any stack frames, potentially reporting on
4634 region_model::on_longjmp (const gcall
*longjmp_call
, const gcall
*setjmp_call
,
4635 int setjmp_stack_depth
,
4636 region_model_context
*ctxt
)
4638 /* Evaluate the val, using the frame of the "longjmp". */
4639 tree fake_retval
= gimple_call_arg (longjmp_call
, 1);
4640 svalue_id fake_retval_sid
= get_rvalue (fake_retval
, ctxt
);
4642 /* Pop any frames until we reach the stack depth of the function where
4643 setjmp was called. */
4644 gcc_assert (get_stack_depth () >= setjmp_stack_depth
);
4645 while (get_stack_depth () > setjmp_stack_depth
)
4647 /* Don't purge unused svalues yet, as we're using fake_retval_sid. */
4648 pop_frame (false, NULL
, ctxt
);
4651 gcc_assert (get_stack_depth () == setjmp_stack_depth
);
4653 /* Assign to LHS of "setjmp" in new_state. */
4654 if (tree lhs
= gimple_call_lhs (setjmp_call
))
4656 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
4657 tree t_zero
= build_int_cst (TREE_TYPE (fake_retval
), 0);
4658 svalue_id zero_sid
= get_or_create_constant_svalue (t_zero
);
4659 tristate eq_zero
= eval_condition (fake_retval_sid
, EQ_EXPR
, zero_sid
);
4660 /* If we have 0, use 1. */
4661 if (eq_zero
.is_true ())
4663 tree t_one
= build_int_cst (TREE_TYPE (fake_retval
), 1);
4664 svalue_id one_sid
= get_or_create_constant_svalue (t_one
);
4665 fake_retval_sid
= one_sid
;
4669 /* Otherwise note that the value is nonzero. */
4670 m_constraints
->add_constraint (fake_retval_sid
, NE_EXPR
, zero_sid
);
4673 region_id lhs_rid
= get_lvalue (lhs
, ctxt
);
4674 set_value (lhs_rid
, fake_retval_sid
, ctxt
);
4677 /* Now that we've assigned the fake_retval, we can purge the unused
4678 svalues, which could detect leaks. */
4679 purge_unused_svalues (NULL
, ctxt
, NULL
);
4683 /* Update this region_model for a phi stmt of the form
4684 LHS = PHI <...RHS...>.
4685 where RHS is for the appropriate edge. */
4688 region_model::handle_phi (const gphi
*phi
,
4689 tree lhs
, tree rhs
, bool is_back_edge
,
4690 region_model_context
*ctxt
)
4692 /* For now, don't bother tracking the .MEM SSA names. */
4693 if (tree var
= SSA_NAME_VAR (lhs
))
4694 if (TREE_CODE (var
) == VAR_DECL
)
4695 if (VAR_DECL_IS_VIRTUAL_OPERAND (var
))
4698 svalue_id rhs_sid
= get_rvalue (rhs
, ctxt
);
4700 if (is_back_edge
&& get_svalue (rhs_sid
)->get_kind () != SK_UNKNOWN
)
4702 /* If we have a back edge, we probably have a loop.
4703 Use an unknown value, to avoid effectively unrolling the
4705 To terminate, we need to avoid generating a series of
4706 models with an unbounded monotonically increasing number of
4707 redundant unknown values; hence we need to purge svalues
4708 before inserting the state into the exploded graph, to
4709 collect unused svalues. */
4710 set_to_new_unknown_value (get_lvalue (lhs
, ctxt
), TREE_TYPE (lhs
), ctxt
);
4713 set_value (get_lvalue (lhs
, ctxt
), rhs_sid
, ctxt
);
4716 ctxt
->on_phi (phi
, rhs
);
4719 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
4721 Get the id of the region for PV within this region_model,
4722 emitting any diagnostics to CTXT. */
4725 region_model::get_lvalue_1 (path_var pv
, region_model_context
*ctxt
)
4727 tree expr
= pv
.m_tree
;
4731 switch (TREE_CODE (expr
))
4734 return make_region_for_unexpected_tree_code (ctxt
, expr
,
4735 dump_location_t ());
4739 tree array
= TREE_OPERAND (expr
, 0);
4740 tree index
= TREE_OPERAND (expr
, 1);
4742 // TODO: operands 2 and 3, if present:
4743 gcc_assert (TREE_OPERAND (expr
, 2) == NULL_TREE
);
4744 gcc_assert (TREE_OPERAND (expr
, 3) == NULL_TREE
);
4747 region_id array_rid
= get_lvalue (array
, ctxt
);
4748 svalue_id index_sid
= get_rvalue (index
, ctxt
);
4749 region
*base_array_reg
= get_region (array_rid
);
4750 array_region
*array_reg
= base_array_reg
->dyn_cast_array_region ();
4753 /* Normally, array_rid ought to refer to an array_region, since
4754 array's type will be ARRAY_TYPE. However, if we have an
4755 unexpected tree code for array, we could have a
4756 symbolic_region here. If so, we're in error-handling. */
4757 gcc_assert (base_array_reg
->get_type () == NULL_TREE
);
4758 return make_region_for_unexpected_tree_code (ctxt
, expr
,
4759 dump_location_t ());
4761 return array_reg
->get_element (this, array_rid
, index_sid
, ctxt
);
4767 /* For now, create a view, as if a cast, ignoring the bit positions. */
4768 tree obj
= TREE_OPERAND (expr
, 0);
4769 return get_or_create_view (get_lvalue (obj
, ctxt
), TREE_TYPE (expr
),
4776 tree ptr
= TREE_OPERAND (expr
, 0);
4777 tree offset
= TREE_OPERAND (expr
, 1);
4778 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
4779 svalue_id offset_sid
= get_rvalue (offset
, ctxt
);
4780 return get_or_create_mem_ref (TREE_TYPE (expr
), ptr_sid
,
4786 /* Handle globals. */
4787 if (is_global_var (expr
))
4789 region_id globals_rid
4790 = get_root_region ()->ensure_globals_region (this);
4791 map_region
*globals
= get_region
<map_region
> (globals_rid
);
4792 region_id var_rid
= globals
->get_or_create (this, globals_rid
, expr
,
4793 TREE_TYPE (expr
), ctxt
);
4803 gcc_assert (TREE_CODE (expr
) == SSA_NAME
4804 || TREE_CODE (expr
) == PARM_DECL
4805 || TREE_CODE (expr
) == VAR_DECL
4806 || TREE_CODE (expr
) == RESULT_DECL
);
4808 int stack_depth
= pv
.m_stack_depth
;
4809 stack_region
*stack
= get_root_region ()->get_stack_region (this);
4811 region_id frame_rid
= stack
->get_frame_rid (stack_depth
);
4812 frame_region
*frame
= get_region
<frame_region
> (frame_rid
);
4814 region_id child_rid
= frame
->get_or_create (this, frame_rid
, expr
,
4815 TREE_TYPE (expr
), ctxt
);
4822 tree obj
= TREE_OPERAND (expr
, 0);
4823 tree field
= TREE_OPERAND (expr
, 1);
4824 tree obj_type
= TREE_TYPE (obj
);
4825 if (TREE_CODE (obj_type
) != RECORD_TYPE
4826 && TREE_CODE (obj_type
) != UNION_TYPE
)
4827 return make_region_for_unexpected_tree_code (ctxt
, obj_type
,
4828 dump_location_t ());
4829 region_id obj_rid
= get_lvalue (obj
, ctxt
);
4830 region_id struct_or_union_rid
4831 = get_or_create_view (obj_rid
, TREE_TYPE (obj
), ctxt
);
4832 return get_field_region (struct_or_union_rid
, field
, ctxt
);
4838 tree cst_type
= TREE_TYPE (expr
);
4839 region_id cst_rid
= add_region_for_type (m_root_rid
, cst_type
, ctxt
);
4840 if (tree value
= DECL_INITIAL (expr
))
4842 svalue_id sid
= get_rvalue (value
, ctxt
);
4843 get_region (cst_rid
)->set_value (*this, cst_rid
, sid
, ctxt
);
4851 tree cst_type
= TREE_TYPE (expr
);
4852 array_region
*cst_region
= new array_region (m_root_rid
, cst_type
);
4853 region_id cst_rid
= add_region (cst_region
);
4854 svalue_id cst_sid
= get_or_create_constant_svalue (expr
);
4855 cst_region
->set_value (*this, cst_rid
, cst_sid
, ctxt
);
4861 case VIEW_CONVERT_EXPR
:
4863 tree obj
= TREE_OPERAND (expr
, 0);
4864 return get_or_create_view (get_lvalue (obj
, ctxt
), TREE_TYPE (expr
),
4871 /* If we see a tree code we don't know how to handle, rather than
4872 ICE or generate bogus results, create a dummy region, and notify
4873 CTXT so that it can mark the new state as being not properly
4874 modelled. The exploded graph can then stop exploring that path,
4875 since any diagnostics we might issue will have questionable
4879 region_model::make_region_for_unexpected_tree_code (region_model_context
*ctxt
,
4881 const dump_location_t
&loc
)
4885 = add_region (new symbolic_region (m_root_rid
, NULL_TREE
, false));
4886 ctxt
->on_unexpected_tree_code (t
, loc
);
4890 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
4893 assert_compat_types (tree src_type
, tree dst_type
)
4895 if (src_type
&& dst_type
&& !VOID_TYPE_P (dst_type
))
4896 gcc_checking_assert (useless_type_conversion_p (src_type
, dst_type
));
4899 /* Get the id of the region for PV within this region_model,
4900 emitting any diagnostics to CTXT. */
4903 region_model::get_lvalue (path_var pv
, region_model_context
*ctxt
)
4905 if (pv
.m_tree
== NULL_TREE
)
4906 return region_id::null ();
4908 region_id result_rid
= get_lvalue_1 (pv
, ctxt
);
4909 assert_compat_types (get_region (result_rid
)->get_type (),
4910 TREE_TYPE (pv
.m_tree
));
4914 /* Get the region_id for EXPR within this region_model (assuming the most
4915 recent stack frame if it's a local). */
4918 region_model::get_lvalue (tree expr
, region_model_context
*ctxt
)
4920 return get_lvalue (path_var (expr
, get_stack_depth () - 1), ctxt
);
4923 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
4925 Get the value of PV within this region_model,
4926 emitting any diagnostics to CTXT. */
4929 region_model::get_rvalue_1 (path_var pv
, region_model_context
*ctxt
)
4931 gcc_assert (pv
.m_tree
);
4933 switch (TREE_CODE (pv
.m_tree
))
4937 svalue
*unknown_sval
= new unknown_svalue (TREE_TYPE (pv
.m_tree
));
4938 return add_svalue (unknown_sval
);
4945 tree expr
= pv
.m_tree
;
4946 tree op0
= TREE_OPERAND (expr
, 0);
4947 if (TREE_CODE (op0
) == FUNCTION_DECL
)
4948 return get_svalue_for_fndecl (TREE_TYPE (expr
), op0
, ctxt
);
4949 else if (TREE_CODE (op0
) == LABEL_DECL
)
4950 return get_svalue_for_label (TREE_TYPE (expr
), op0
, ctxt
);
4951 region_id expr_rid
= get_lvalue (op0
, ctxt
);
4952 return get_or_create_ptr_svalue (TREE_TYPE (expr
), expr_rid
);
4958 region_id element_rid
= get_lvalue (pv
, ctxt
);
4959 return get_region (element_rid
)->get_value (*this, true, ctxt
);
4965 return get_or_create_constant_svalue (pv
.m_tree
);
4974 region_id var_rid
= get_lvalue (pv
, ctxt
);
4975 return get_region (var_rid
)->get_value (*this, true, ctxt
);
4980 /* Get the value of PV within this region_model,
4981 emitting any diagnostics to CTXT. */
4984 region_model::get_rvalue (path_var pv
, region_model_context
*ctxt
)
4986 if (pv
.m_tree
== NULL_TREE
)
4987 return svalue_id::null ();
4988 svalue_id result_sid
= get_rvalue_1 (pv
, ctxt
);
4990 assert_compat_types (get_svalue (result_sid
)->get_type (),
4991 TREE_TYPE (pv
.m_tree
));
4996 /* Get the value of EXPR within this region_model (assuming the most
4997 recent stack frame if it's a local). */
5000 region_model::get_rvalue (tree expr
, region_model_context
*ctxt
)
5002 return get_rvalue (path_var (expr
, get_stack_depth () - 1), ctxt
);
5005 /* Return an svalue_id for a pointer to RID of type PTR_TYPE, reusing
5006 existing pointer values if one is available. */
5009 region_model::get_or_create_ptr_svalue (tree ptr_type
, region_id rid
)
5011 /* Reuse existing region_svalue, if one of the right type is
5013 /* In theory we could stash a svalue_id in "region", but differing
5014 pointer types muddles things.
5015 For now, just do a linear search through all existing svalues. */
5018 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
5019 if (region_svalue
*ptr_svalue
= svalue
->dyn_cast_region_svalue ())
5020 if (ptr_svalue
->get_pointee () == rid
5021 && ptr_svalue
->get_type () == ptr_type
)
5022 return svalue_id::from_int (i
);
5024 return add_svalue (new region_svalue (ptr_type
, rid
));
5027 /* Return an svalue_id for a constant_svalue for CST_EXPR,
5028 creating the constant_svalue if necessary.
5029 The constant_svalue instances are reused, based on pointer equality
5033 region_model::get_or_create_constant_svalue (tree cst_expr
)
5035 gcc_assert (cst_expr
);
5037 /* Reuse one if it already exists. */
5038 // TODO: maybe store a map, rather than do linear search?
5041 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
5042 if (svalue
->maybe_get_constant () == cst_expr
)
5043 return svalue_id::from_int (i
);
5045 svalue_id cst_sid
= add_svalue (new constant_svalue (cst_expr
));
5049 /* Return an svalue_id for a region_svalue for FNDECL,
5050 creating the function_region if necessary. */
5053 region_model::get_svalue_for_fndecl (tree ptr_type
, tree fndecl
,
5054 region_model_context
*ctxt
)
5056 gcc_assert (TREE_CODE (fndecl
) == FUNCTION_DECL
);
5057 region_id function_rid
= get_region_for_fndecl (fndecl
, ctxt
);
5058 return get_or_create_ptr_svalue (ptr_type
, function_rid
);
5061 /* Return a region_id for a function_region for FNDECL,
5062 creating it if necessary. */
5065 region_model::get_region_for_fndecl (tree fndecl
,
5066 region_model_context
*ctxt
)
5068 gcc_assert (TREE_CODE (fndecl
) == FUNCTION_DECL
);
5070 region_id code_rid
= get_root_region ()->ensure_code_region (this);
5071 code_region
*code
= get_root_region ()->get_code_region (this);
5073 return code
->get_or_create (this, code_rid
, fndecl
, TREE_TYPE (fndecl
),
5077 /* Return an svalue_id for a region_svalue for LABEL,
5078 creating the label_region if necessary. */
5081 region_model::get_svalue_for_label (tree ptr_type
, tree label
,
5082 region_model_context
*ctxt
)
5084 gcc_assert (TREE_CODE (label
) == LABEL_DECL
);
5085 region_id label_rid
= get_region_for_label (label
, ctxt
);
5086 return get_or_create_ptr_svalue (ptr_type
, label_rid
);
5089 /* Return a region_id for a label_region for LABEL,
5090 creating it if necessary. */
5093 region_model::get_region_for_label (tree label
,
5094 region_model_context
*ctxt
)
5096 gcc_assert (TREE_CODE (label
) == LABEL_DECL
);
5098 tree fndecl
= DECL_CONTEXT (label
);
5099 gcc_assert (fndecl
&& TREE_CODE (fndecl
) == FUNCTION_DECL
);
5101 region_id func_rid
= get_region_for_fndecl (fndecl
, ctxt
);
5102 function_region
*func_reg
= get_region
<function_region
> (func_rid
);
5103 return func_reg
->get_or_create (this, func_rid
, label
, TREE_TYPE (label
),
5107 /* Build a cast of SRC_EXPR to DST_TYPE, or return NULL_TREE.
5109 Adapted from gcc::jit::playback::context::build_cast, which in turn is
5111 - c/c-typeck.c:build_c_cast
5112 - c/c-convert.c: convert
5114 Only some kinds of cast are currently supported here. */
5117 build_cast (tree dst_type
, tree src_expr
)
5119 tree result
= targetm
.convert_to_type (dst_type
, src_expr
);
5122 enum tree_code dst_code
= TREE_CODE (dst_type
);
5127 result
= convert_to_integer (dst_type
, src_expr
);
5131 /* Compare with c_objc_common_truthvalue_conversion and
5132 c_common_truthvalue_conversion. */
5133 /* For now, convert to: (src_expr != 0) */
5134 result
= build2 (NE_EXPR
, dst_type
,
5136 build_int_cst (TREE_TYPE (src_expr
), 0));
5140 result
= convert_to_real (dst_type
, src_expr
);
5144 result
= build1 (NOP_EXPR
, dst_type
, src_expr
);
5151 if (TREE_CODE (result
) != C_MAYBE_CONST_EXPR
)
5152 result
= fold (result
);
5157 /* If the type of SID's underlying value is DST_TYPE, return SID.
5158 Otherwise, attempt to create (or reuse) an svalue representing an access
5159 of SID as a DST_TYPE and return that value's svalue_id. */
5162 region_model::maybe_cast_1 (tree dst_type
, svalue_id sid
)
5164 svalue
*sval
= get_svalue (sid
);
5165 tree src_type
= sval
->get_type ();
5166 if (src_type
== dst_type
)
5169 if (POINTER_TYPE_P (dst_type
)
5170 || POINTER_TYPE_P (src_type
))
5172 /* Pointer to region. */
5173 if (region_svalue
*ptr_sval
= sval
->dyn_cast_region_svalue ())
5174 return get_or_create_ptr_svalue (dst_type
, ptr_sval
->get_pointee ());
5176 /* Unknown pointer? Get or create a new unknown pointer of the
5177 correct type, preserving the equality between the pointers. */
5178 if (sval
->dyn_cast_unknown_svalue ())
5180 equiv_class
&ec
= m_constraints
->get_equiv_class (sid
);
5182 /* Look for an existing pointer of the correct type within the EC. */
5184 svalue_id
*equiv_sid
;
5185 FOR_EACH_VEC_ELT (ec
.m_vars
, i
, equiv_sid
)
5187 svalue
*equiv_val
= get_svalue (*equiv_sid
);
5188 if (equiv_val
->get_type () == dst_type
)
5192 /* Otherwise, create a new unknown pointer of the correct type. */
5193 svalue
*unknown_sval
= new unknown_svalue (dst_type
);
5194 svalue_id new_ptr_sid
= add_svalue (unknown_sval
);
5195 m_constraints
->add_constraint (sid
, EQ_EXPR
, new_ptr_sid
);
5200 /* Attempt to cast constants. */
5201 if (tree src_cst
= sval
->maybe_get_constant ())
5203 if (tree dst
= build_cast (dst_type
, src_cst
))
5204 if (CONSTANT_CLASS_P (dst
))
5205 return get_or_create_constant_svalue (dst
);
5208 /* Otherwise, return a new unknown value. */
5209 svalue
*unknown_sval
= new unknown_svalue (dst_type
);
5210 return add_svalue (unknown_sval
);
5213 /* If the type of SID's underlying value is DST_TYPE, return SID.
5214 Otherwise, attempt to create (or reuse) an svalue representing an access
5215 of SID as a DST_TYPE and return that value's svalue_id.
5217 If the result != SID, then call CTXT's on_cast vfunc (if CTXT is non-NULL),
5218 so that sm-state can be propagated from SID to the result. */
5221 region_model::maybe_cast (tree dst_type
, svalue_id sid
,
5222 region_model_context
*ctxt
)
5224 svalue_id result
= maybe_cast_1 (dst_type
, sid
);
5228 /* Notify ctxt about a cast, so any sm-state can be copied. */
5229 ctxt
->on_cast (sid
, result
);
5234 /* Ensure that the region for OBJ_RID has a child region for FIELD;
5235 return the child region's region_id. */
5238 region_model::get_field_region (region_id struct_or_union_rid
, tree field
,
5239 region_model_context
*ctxt
)
5241 struct_or_union_region
*sou_reg
5242 = get_region
<struct_or_union_region
> (struct_or_union_rid
);
5244 /* Inherit constness from parent type. */
5245 const int qual_mask
= TYPE_QUAL_CONST
;
5246 int sou_quals
= TYPE_QUALS (sou_reg
->get_type ()) & qual_mask
;
5247 tree field_type
= TREE_TYPE (field
);
5248 tree field_type_with_quals
= build_qualified_type (field_type
, sou_quals
);
5250 // TODO: maybe convert to a vfunc?
5251 if (sou_reg
->get_kind () == RK_UNION
)
5254 Get a view of the union as a whole, with the type of the field. */
5256 = get_or_create_view (struct_or_union_rid
, field_type_with_quals
, ctxt
);
5263 = sou_reg
->get_or_create (this, struct_or_union_rid
, field
,
5264 field_type_with_quals
, ctxt
);
5269 /* Get a region_id for referencing PTR_SID, creating a region if need be, and
5270 potentially generating warnings via CTXT. */
5273 region_model::deref_rvalue (svalue_id ptr_sid
, region_model_context
*ctxt
)
5275 gcc_assert (!ptr_sid
.null_p ());
5276 svalue
*ptr_svalue
= get_svalue (ptr_sid
);
5277 gcc_assert (ptr_svalue
);
5279 switch (ptr_svalue
->get_kind ())
5283 region_svalue
*region_sval
= as_a
<region_svalue
*> (ptr_svalue
);
5284 return region_sval
->get_pointee ();
5288 goto create_symbolic_region
;
5293 if (tree ptr
= get_representative_tree (ptr_sid
))
5295 poisoned_svalue
*poisoned_sval
5296 = as_a
<poisoned_svalue
*> (ptr_svalue
);
5297 enum poison_kind pkind
= poisoned_sval
->get_poison_kind ();
5298 ctxt
->warn (new poisoned_value_diagnostic (ptr
, pkind
));
5300 goto create_symbolic_region
;
5305 create_symbolic_region
:
5306 /* We need a symbolic_region to represent this unknown region.
5307 We don't know if it on the heap, stack, or a global,
5308 so use the root region as parent. */
5310 = add_region (new symbolic_region (m_root_rid
, NULL_TREE
, false));
5312 /* We need to write the region back into the pointer,
5313 or we'll get a new, different region each time.
5314 We do this by changing the meaning of ptr_sid, replacing
5315 the unknown value with the ptr to the new region.
5316 We replace the meaning of the ID rather than simply writing
5317 to PTR's lvalue since there could be several places sharing
5318 the same unknown ptr value. */
5320 = new region_svalue (ptr_svalue
->get_type (), new_rid
);
5321 replace_svalue (ptr_sid
, ptr_val
);
5327 goto create_symbolic_region
;
5333 /* Get a region_id for referencing PTR, creating a region if need be, and
5334 potentially generating warnings via CTXT. */
5337 region_model::deref_rvalue (tree ptr
, region_model_context
*ctxt
)
5339 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
5340 return deref_rvalue (ptr_sid
, ctxt
);
5343 /* Set the value of the region given by LHS_RID to the value given
5347 region_model::set_value (region_id lhs_rid
, svalue_id rhs_sid
,
5348 region_model_context
*ctxt
)
5350 gcc_assert (!lhs_rid
.null_p ());
5351 gcc_assert (!rhs_sid
.null_p ());
5352 get_region (lhs_rid
)->set_value (*this, lhs_rid
, rhs_sid
, ctxt
);
5355 /* Set the value of the region given by LHS to the value given
5359 region_model::set_value (tree lhs
, tree rhs
, region_model_context
*ctxt
)
5361 region_id lhs_rid
= get_lvalue (lhs
, ctxt
);
5362 svalue_id rhs_sid
= get_rvalue (rhs
, ctxt
);
5363 gcc_assert (!lhs_rid
.null_p ());
5364 gcc_assert (!rhs_sid
.null_p ());
5365 set_value (lhs_rid
, rhs_sid
, ctxt
);
5368 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5372 region_model::eval_condition (svalue_id lhs_sid
,
5374 svalue_id rhs_sid
) const
5376 svalue
*lhs
= get_svalue (lhs_sid
);
5377 svalue
*rhs
= get_svalue (rhs_sid
);
5379 /* For now, make no attempt to capture constraints on floating-point
5381 if ((lhs
->get_type () && FLOAT_TYPE_P (lhs
->get_type ()))
5382 || (rhs
->get_type () && FLOAT_TYPE_P (rhs
->get_type ())))
5383 return tristate::unknown ();
5385 tristate ts
= eval_condition_without_cm (lhs_sid
, op
, rhs_sid
);
5390 /* Otherwise, try constraints. */
5391 return m_constraints
->eval_condition (lhs_sid
, op
, rhs_sid
);
5394 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5395 this model, without resorting to the constraint_manager.
5397 This is exposed so that impl_region_model_context::on_state_leak can
5398 check for equality part-way through region_model::purge_unused_svalues
5399 without risking creating new ECs. */
5402 region_model::eval_condition_without_cm (svalue_id lhs_sid
,
5404 svalue_id rhs_sid
) const
5406 svalue
*lhs
= get_svalue (lhs_sid
);
5407 svalue
*rhs
= get_svalue (rhs_sid
);
5411 /* See what we know based on the values. */
5414 /* For now, make no attempt to capture constraints on floating-point
5416 if ((lhs
->get_type () && FLOAT_TYPE_P (lhs
->get_type ()))
5417 || (rhs
->get_type () && FLOAT_TYPE_P (rhs
->get_type ())))
5418 return tristate::unknown ();
5422 /* If we have the same svalue, then we have equality
5423 (apart from NaN-handling).
5424 TODO: should this definitely be the case for poisoned values? */
5430 return tristate::TS_TRUE
;
5435 return tristate::TS_FALSE
;
5438 /* For other ops, use the logic below. */
5443 /* If we have a pair of region_svalues, compare them. */
5444 if (region_svalue
*lhs_ptr
= lhs
->dyn_cast_region_svalue ())
5445 if (region_svalue
*rhs_ptr
= rhs
->dyn_cast_region_svalue ())
5447 tristate res
= region_svalue::eval_condition (lhs_ptr
, op
, rhs_ptr
);
5448 if (res
.is_known ())
5450 /* Otherwise, only known through constraints. */
5453 /* If we have a pair of constants, compare them. */
5454 if (constant_svalue
*cst_lhs
= lhs
->dyn_cast_constant_svalue ())
5455 if (constant_svalue
*cst_rhs
= rhs
->dyn_cast_constant_svalue ())
5456 return constant_svalue::eval_condition (cst_lhs
, op
, cst_rhs
);
5458 /* Handle comparison of a region_svalue against zero. */
5459 if (region_svalue
*ptr
= lhs
->dyn_cast_region_svalue ())
5460 if (constant_svalue
*cst_rhs
= rhs
->dyn_cast_constant_svalue ())
5461 if (zerop (cst_rhs
->get_constant ()))
5463 /* A region_svalue is a non-NULL pointer, except in certain
5464 special cases (see the comment for region::non_null_p. */
5465 region
*pointee
= get_region (ptr
->get_pointee ());
5466 if (pointee
->non_null_p (*this))
5476 return tristate::TS_FALSE
;
5481 return tristate::TS_TRUE
;
5487 return tristate::TS_UNKNOWN
;
5490 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5491 If it is consistent with existing constraints, add it, and return true.
5492 Return false if it contradicts existing constraints.
5493 Use CTXT for reporting any diagnostics associated with the accesses. */
5496 region_model::add_constraint (tree lhs
, enum tree_code op
, tree rhs
,
5497 region_model_context
*ctxt
)
5499 /* For now, make no attempt to capture constraints on floating-point
5501 if (FLOAT_TYPE_P (TREE_TYPE (lhs
)) || FLOAT_TYPE_P (TREE_TYPE (rhs
)))
5504 svalue_id lhs_sid
= get_rvalue (lhs
, ctxt
);
5505 svalue_id rhs_sid
= get_rvalue (rhs
, ctxt
);
5507 tristate t_cond
= eval_condition (lhs_sid
, op
, rhs_sid
);
5509 /* If we already have the condition, do nothing. */
5510 if (t_cond
.is_true ())
5513 /* Reject a constraint that would contradict existing knowledge, as
5515 if (t_cond
.is_false ())
5518 /* Store the constraint. */
5519 m_constraints
->add_constraint (lhs_sid
, op
, rhs_sid
);
5521 add_any_constraints_from_ssa_def_stmt (lhs
, op
, rhs
, ctxt
);
5523 /* Notify the context, if any. This exists so that the state machines
5524 in a program_state can be notified about the condition, and so can
5525 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
5526 when synthesizing constraints as above. */
5528 ctxt
->on_condition (lhs
, op
, rhs
);
5533 /* Subroutine of region_model::add_constraint for handling optimized
5534 && and || conditionals.
5536 If we have an SSA_NAME for a boolean compared against 0,
5537 look at anything implied by the def stmt and call add_constraint
5538 for it (which could recurse).
5540 For example, if we have
5544 and add the constraint
5546 then the def stmt for _3 implies that _1 and _2 are both false,
5547 and hence we can add the constraints:
5552 region_model::add_any_constraints_from_ssa_def_stmt (tree lhs
,
5555 region_model_context
*ctxt
)
5557 if (TREE_CODE (lhs
) != SSA_NAME
)
5563 if (op
!= NE_EXPR
&& op
!= EQ_EXPR
)
5566 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
5567 if (const gassign
*assign
= dyn_cast
<gassign
*> (def_stmt
))
5568 add_any_constraints_from_gassign (op
, rhs
, assign
, ctxt
);
5569 else if (gcall
*call
= dyn_cast
<gcall
*> (def_stmt
))
5570 add_any_constraints_from_gcall (op
, rhs
, call
, ctxt
);
5573 /* Add any constraints for an SSA_NAME defined by ASSIGN
5574 where the result OP RHS. */
5577 region_model::add_any_constraints_from_gassign (enum tree_code op
,
5579 const gassign
*assign
,
5580 region_model_context
*ctxt
)
5583 - "LHS != false" (i.e. LHS is true), or
5584 - "LHS == false" (i.e. LHS is false). */
5585 bool is_true
= op
== NE_EXPR
;
5587 enum tree_code rhs_code
= gimple_assign_rhs_code (assign
);
5596 add_constraint (gimple_assign_rhs1 (assign
), op
, rhs
, ctxt
);
5604 /* ...and "LHS == (rhs1 & rhs2) i.e. "(rhs1 & rhs2)" is true
5605 then both rhs1 and rhs2 must be true. */
5606 tree rhs1
= gimple_assign_rhs1 (assign
);
5607 tree rhs2
= gimple_assign_rhs2 (assign
);
5608 add_constraint (rhs1
, NE_EXPR
, boolean_false_node
, ctxt
);
5609 add_constraint (rhs2
, NE_EXPR
, boolean_false_node
, ctxt
);
5618 /* ...and "LHS == (rhs1 | rhs2)
5619 i.e. "(rhs1 | rhs2)" is false
5620 then both rhs1 and rhs2 must be false. */
5621 tree rhs1
= gimple_assign_rhs1 (assign
);
5622 tree rhs2
= gimple_assign_rhs2 (assign
);
5623 add_constraint (rhs1
, EQ_EXPR
, boolean_false_node
, ctxt
);
5624 add_constraint (rhs2
, EQ_EXPR
, boolean_false_node
, ctxt
);
5632 /* ...and "LHS == (rhs1 OP rhs2)"
5633 then rhs1 OP rhs2 must have the same logical value as LHS. */
5634 tree rhs1
= gimple_assign_rhs1 (assign
);
5635 tree rhs2
= gimple_assign_rhs2 (assign
);
5638 = invert_tree_comparison (rhs_code
, false /* honor_nans */);
5639 add_constraint (rhs1
, rhs_code
, rhs2
, ctxt
);
5645 /* Add any constraints for an SSA_NAME defined by CALL
5646 where the result OP RHS. */
5649 region_model::add_any_constraints_from_gcall (enum tree_code op
,
5652 region_model_context
*ctxt
)
5654 if (gimple_call_builtin_p (call
, BUILT_IN_EXPECT
)
5655 || gimple_call_builtin_p (call
, BUILT_IN_EXPECT_WITH_PROBABILITY
)
5656 || gimple_call_internal_p (call
, IFN_BUILTIN_EXPECT
))
5658 /* __builtin_expect's return value is its initial argument. */
5659 add_constraint (gimple_call_arg (call
, 0), op
, rhs
, ctxt
);
5663 /* Determine what is known about the condition "LHS OP RHS" within
5665 Use CTXT for reporting any diagnostics associated with the accesses. */
5668 region_model::eval_condition (tree lhs
,
5671 region_model_context
*ctxt
)
5673 /* For now, make no attempt to model constraints on floating-point
5675 if (FLOAT_TYPE_P (TREE_TYPE (lhs
)) || FLOAT_TYPE_P (TREE_TYPE (rhs
)))
5676 return tristate::unknown ();
5678 return eval_condition (get_rvalue (lhs
, ctxt
), op
, get_rvalue (rhs
, ctxt
));
5681 /* If SID is a constant value, return the underlying tree constant.
5682 Otherwise, return NULL_TREE. */
5685 region_model::maybe_get_constant (svalue_id sid
) const
5687 gcc_assert (!sid
.null_p ());
5688 svalue
*sval
= get_svalue (sid
);
5689 return sval
->maybe_get_constant ();
5692 /* Create a new child region of the heap (creating the heap region if
5694 Return the region_id of the new child region. */
5697 region_model::add_new_malloc_region ()
5700 = get_root_region ()->ensure_heap_region (this);
5701 return add_region (new symbolic_region (heap_rid
, NULL_TREE
, true));
5704 /* Attempt to return a tree that represents SID, or return NULL_TREE. */
5707 region_model::get_representative_tree (svalue_id sid
) const
5712 /* Find the first region that stores the value (e.g. a local) and
5713 generate a representative tree for it. */
5716 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
5717 if (sid
== region
->get_value_direct ())
5719 path_var pv
= get_representative_path_var (region_id::from_int (i
));
5724 /* Handle string literals and various other pointers. */
5725 svalue
*sval
= get_svalue (sid
);
5726 if (region_svalue
*ptr_sval
= sval
->dyn_cast_region_svalue ())
5728 region_id rid
= ptr_sval
->get_pointee ();
5729 path_var pv
= get_representative_path_var (rid
);
5731 return build1 (ADDR_EXPR
,
5732 TREE_TYPE (sval
->get_type ()),
5736 return maybe_get_constant (sid
);
5739 /* Attempt to return a path_var that represents the region, or return
5741 For example, a region for a field of a local would be a path_var
5742 wrapping a COMPONENT_REF. */
5745 region_model::get_representative_path_var (region_id rid
) const
5747 region
*reg
= get_region (rid
);
5748 region
*parent_reg
= get_region (reg
->get_parent ());
5749 region_id stack_rid
= get_stack_region_id ();
5750 if (!stack_rid
.null_p ())
5751 if (parent_reg
&& parent_reg
->get_parent () == stack_rid
)
5753 frame_region
*parent_frame
= (frame_region
*)parent_reg
;
5754 tree t
= parent_frame
->get_tree_for_child_region (rid
);
5755 return path_var (t
, parent_frame
->get_depth ());
5757 if (reg
->get_parent () == get_globals_region_id ())
5759 map_region
*globals
= get_root_region ()->get_globals_region (this);
5761 return path_var (globals
->get_tree_for_child_region (rid
), -1);
5764 /* Handle e.g. fields of a local by recursing. */
5765 region_id parent_rid
= reg
->get_parent ();
5768 if (reg
->is_view_p ())
5770 path_var parent_pv
= get_representative_path_var (parent_rid
);
5771 if (parent_pv
.m_tree
&& reg
->get_type ())
5772 return path_var (build1 (NOP_EXPR
,
5775 parent_pv
.m_stack_depth
);
5778 if (parent_reg
->get_kind () == RK_STRUCT
)
5780 map_region
*parent_map_region
= (map_region
*)parent_reg
;
5781 /* This can fail if we have a view, rather than a field. */
5783 = parent_map_region
->get_tree_for_child_region (rid
))
5785 path_var parent_pv
= get_representative_path_var (parent_rid
);
5786 if (parent_pv
.m_tree
&& TREE_CODE (child_key
) == FIELD_DECL
)
5787 return path_var (build3 (COMPONENT_REF
,
5788 TREE_TYPE (child_key
),
5789 parent_pv
.m_tree
, child_key
,
5791 parent_pv
.m_stack_depth
);
5795 /* Handle elements within an array. */
5796 if (array_region
*array_reg
= parent_reg
->dyn_cast_array_region ())
5798 array_region::key_t key
;
5799 if (array_reg
->get_key_for_child_region (rid
, &key
))
5801 path_var parent_pv
= get_representative_path_var (parent_rid
);
5802 if (parent_pv
.m_tree
&& reg
->get_type ())
5804 tree index
= array_reg
->constant_from_key (key
);
5805 return path_var (build4 (ARRAY_REF
,
5807 parent_pv
.m_tree
, index
,
5808 NULL_TREE
, NULL_TREE
),
5809 parent_pv
.m_stack_depth
);
5815 /* Handle string literals. */
5816 svalue_id sid
= reg
->get_value_direct ();
5817 if (svalue
*sval
= get_svalue (sid
))
5818 if (tree cst
= sval
->maybe_get_constant ())
5819 if (TREE_CODE (cst
) == STRING_CST
)
5820 return path_var (cst
, 0);
5822 return path_var (NULL_TREE
, 0);
5825 /* Locate all regions that directly have value SID and append representative
5826 path_var instances for them into *OUT. */
5829 region_model::get_path_vars_for_svalue (svalue_id sid
, vec
<path_var
> *out
) const
5833 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
5834 if (sid
== region
->get_value_direct ())
5836 path_var pv
= get_representative_path_var (region_id::from_int (i
));
5838 out
->safe_push (pv
);
5842 /* Set DST_RID value to be a new unknown value of type TYPE. */
5845 region_model::set_to_new_unknown_value (region_id dst_rid
, tree type
,
5846 region_model_context
*ctxt
)
5848 gcc_assert (!dst_rid
.null_p ());
5849 svalue_id new_sid
= add_svalue (new unknown_svalue (type
));
5850 set_value (dst_rid
, new_sid
, ctxt
);
5852 // TODO: presumably purge all child regions too (but do this in set_value?)
5857 /* Update this model for any phis in SNODE, assuming we came from
5858 LAST_CFG_SUPEREDGE. */
5861 region_model::update_for_phis (const supernode
*snode
,
5862 const cfg_superedge
*last_cfg_superedge
,
5863 region_model_context
*ctxt
)
5865 gcc_assert (last_cfg_superedge
);
5867 for (gphi_iterator gpi
= const_cast<supernode
*>(snode
)->start_phis ();
5868 !gsi_end_p (gpi
); gsi_next (&gpi
))
5870 gphi
*phi
= gpi
.phi ();
5872 tree src
= last_cfg_superedge
->get_phi_arg (phi
);
5873 tree lhs
= gimple_phi_result (phi
);
5875 /* Update next_state based on phi. */
5876 bool is_back_edge
= last_cfg_superedge
->back_edge_p ();
5877 handle_phi (phi
, lhs
, src
, is_back_edge
, ctxt
);
5881 /* Attempt to update this model for taking EDGE (where the last statement
5882 was LAST_STMT), returning true if the edge can be taken, false
5885 For CFG superedges where LAST_STMT is a conditional or a switch
5886 statement, attempt to add the relevant conditions for EDGE to this
5887 model, returning true if they are feasible, or false if they are
5890 For call superedges, push frame information and store arguments
5893 For return superedges, pop frame information and store return
5894 values into any lhs.
5896 Rejection of call/return superedges happens elsewhere, in
5897 program_point::on_edge (i.e. based on program point, rather
5898 than program state). */
5901 region_model::maybe_update_for_edge (const superedge
&edge
,
5902 const gimple
*last_stmt
,
5903 region_model_context
*ctxt
)
5905 /* Handle frame updates for interprocedural edges. */
5906 switch (edge
.m_kind
)
5911 case SUPEREDGE_CALL
:
5913 const call_superedge
*call_edge
= as_a
<const call_superedge
*> (&edge
);
5914 update_for_call_superedge (*call_edge
, ctxt
);
5918 case SUPEREDGE_RETURN
:
5920 const return_superedge
*return_edge
5921 = as_a
<const return_superedge
*> (&edge
);
5922 update_for_return_superedge (*return_edge
, ctxt
);
5926 case SUPEREDGE_INTRAPROCEDURAL_CALL
:
5928 const callgraph_superedge
*cg_sedge
5929 = as_a
<const callgraph_superedge
*> (&edge
);
5930 update_for_call_summary (*cg_sedge
, ctxt
);
5935 if (last_stmt
== NULL
)
5938 /* Apply any constraints for conditionals/switch statements. */
5940 if (const gcond
*cond_stmt
= dyn_cast
<const gcond
*> (last_stmt
))
5942 const cfg_superedge
*cfg_sedge
= as_a
<const cfg_superedge
*> (&edge
);
5943 return apply_constraints_for_gcond (*cfg_sedge
, cond_stmt
, ctxt
);
5946 if (const gswitch
*switch_stmt
= dyn_cast
<const gswitch
*> (last_stmt
))
5948 const switch_cfg_superedge
*switch_sedge
5949 = as_a
<const switch_cfg_superedge
*> (&edge
);
5950 return apply_constraints_for_gswitch (*switch_sedge
, switch_stmt
, ctxt
);
5956 /* Push a new frame_region on to the stack region.
5957 Populate the frame_region with child regions for the function call's
5958 parameters, using values from the arguments at the callsite in the
5962 region_model::update_for_call_superedge (const call_superedge
&call_edge
,
5963 region_model_context
*ctxt
)
5965 /* Build a vec of argument svalue_id, using the current top
5966 frame for resolving tree expressions. */
5967 const gcall
*call_stmt
= call_edge
.get_call_stmt ();
5968 auto_vec
<svalue_id
> arg_sids (gimple_call_num_args (call_stmt
));
5970 for (unsigned i
= 0; i
< gimple_call_num_args (call_stmt
); i
++)
5972 tree arg
= gimple_call_arg (call_stmt
, i
);
5973 arg_sids
.quick_push (get_rvalue (arg
, ctxt
));
5976 push_frame (call_edge
.get_callee_function (), &arg_sids
, ctxt
);
5979 /* Pop the top-most frame_region from the stack, and store the svalue
5980 for any returned value into the region for the lvalue of the LHS of
5981 the call (if any). */
5984 region_model::update_for_return_superedge (const return_superedge
&return_edge
,
5985 region_model_context
*ctxt
)
5988 svalue_id result_sid
= pop_frame (true, &stats
, ctxt
);
5989 // TODO: do something with the stats?
5991 if (result_sid
.null_p ())
5994 /* Set the result of the call, within the caller frame. */
5995 const gcall
*call_stmt
= return_edge
.get_call_stmt ();
5996 tree lhs
= gimple_call_lhs (call_stmt
);
5998 set_value (get_lvalue (lhs
, ctxt
), result_sid
, ctxt
);
6001 /* This could be a leak; try purging again, but this time,
6002 don't special-case the result_sid. */
6004 purge_unused_svalues (&stats
, ctxt
);
6008 /* Update this region_model with a summary of the effect of calling
6009 and returning from CG_SEDGE.
6011 TODO: Currently this is extremely simplistic: we merely set the
6012 return value to "unknown". A proper implementation would e.g. update
6013 sm-state, and presumably be reworked to support multiple outcomes. */
6016 region_model::update_for_call_summary (const callgraph_superedge
&cg_sedge
,
6017 region_model_context
*ctxt
)
6019 /* For now, set any return value to "unknown". */
6020 const gcall
*call_stmt
= cg_sedge
.get_call_stmt ();
6021 tree lhs
= gimple_call_lhs (call_stmt
);
6023 set_to_new_unknown_value (get_lvalue (lhs
, ctxt
), TREE_TYPE (lhs
), ctxt
);
6025 // TODO: actually implement some kind of summary here
6028 /* Given a true or false edge guarded by conditional statement COND_STMT,
6029 determine appropriate constraints for the edge to be taken.
6031 If they are feasible, add the constraints and return true.
6033 Return false if the constraints contradict existing knowledge
6034 (and so the edge should not be taken). */
6037 region_model::apply_constraints_for_gcond (const cfg_superedge
&sedge
,
6038 const gcond
*cond_stmt
,
6039 region_model_context
*ctxt
)
6041 ::edge cfg_edge
= sedge
.get_cfg_edge ();
6042 gcc_assert (cfg_edge
!= NULL
);
6043 gcc_assert (cfg_edge
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
));
6045 enum tree_code op
= gimple_cond_code (cond_stmt
);
6046 tree lhs
= gimple_cond_lhs (cond_stmt
);
6047 tree rhs
= gimple_cond_rhs (cond_stmt
);
6048 if (cfg_edge
->flags
& EDGE_FALSE_VALUE
)
6049 op
= invert_tree_comparison (op
, false /* honor_nans */);
6050 return add_constraint (lhs
, op
, rhs
, ctxt
);
6053 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
6054 for the edge to be taken.
6056 If they are feasible, add the constraints and return true.
6058 Return false if the constraints contradict existing knowledge
6059 (and so the edge should not be taken). */
6062 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge
&edge
,
6063 const gswitch
*switch_stmt
,
6064 region_model_context
*ctxt
)
6066 tree index
= gimple_switch_index (switch_stmt
);
6067 tree case_label
= edge
.get_case_label ();
6068 gcc_assert (TREE_CODE (case_label
) == CASE_LABEL_EXPR
);
6069 tree lower_bound
= CASE_LOW (case_label
);
6070 tree upper_bound
= CASE_HIGH (case_label
);
6076 if (!add_constraint (index
, GE_EXPR
, lower_bound
, ctxt
))
6078 return add_constraint (index
, LE_EXPR
, upper_bound
, ctxt
);
6082 return add_constraint (index
, EQ_EXPR
, lower_bound
, ctxt
);
6086 /* The default case.
6087 Add exclusions based on the other cases. */
6088 for (unsigned other_idx
= 1;
6089 other_idx
< gimple_switch_num_labels (switch_stmt
);
6092 tree other_label
= gimple_switch_label (switch_stmt
,
6094 tree other_lower_bound
= CASE_LOW (other_label
);
6095 tree other_upper_bound
= CASE_HIGH (other_label
);
6096 gcc_assert (other_lower_bound
);
6097 if (other_upper_bound
)
6099 /* Exclude this range-valued case.
6100 For now, we just exclude the boundary values.
6101 TODO: exclude the values within the region. */
6102 if (!add_constraint (index
, NE_EXPR
, other_lower_bound
, ctxt
))
6104 if (!add_constraint (index
, NE_EXPR
, other_upper_bound
, ctxt
))
6108 /* Exclude this single-valued case. */
6109 if (!add_constraint (index
, NE_EXPR
, other_lower_bound
, ctxt
))
6116 /* Get the root_region within this model (guaranteed to be non-null). */
6119 region_model::get_root_region () const
6121 return get_region
<root_region
> (m_root_rid
);
6124 /* Get the region_id of this model's stack region (if any). */
6127 region_model::get_stack_region_id () const
6129 return get_root_region ()->get_stack_region_id ();
6132 /* Create a new frame_region for a call to FUN and push it onto
6135 If ARG_SIDS is non-NULL, use it to populate the parameters
6137 Otherwise, populate them with unknown values.
6139 Return the region_id of the new frame_region. */
6142 region_model::push_frame (function
*fun
, vec
<svalue_id
> *arg_sids
,
6143 region_model_context
*ctxt
)
6145 return get_root_region ()->push_frame (this, fun
, arg_sids
, ctxt
);
6148 /* Get the region_id of the top-most frame in this region_model's stack,
6152 region_model::get_current_frame_id () const
6154 return get_root_region ()->get_current_frame_id (*this);
6157 /* Get the function of the top-most frame in this region_model's stack.
6158 There must be such a frame. */
6161 region_model::get_current_function () const
6163 region_id frame_id
= get_current_frame_id ();
6164 frame_region
*frame
= get_region
<frame_region
> (frame_id
);
6165 return frame
->get_function ();
6168 /* Pop the topmost frame_region from this region_model's stack;
6169 see the comment for stack_region::pop_frame. */
6172 region_model::pop_frame (bool purge
, purge_stats
*out
,
6173 region_model_context
*ctxt
)
6175 return get_root_region ()->pop_frame (this, purge
, out
, ctxt
);
6178 /* Get the number of frames in this region_model's stack. */
6181 region_model::get_stack_depth () const
6183 stack_region
*stack
= get_root_region ()->get_stack_region (this);
6185 return stack
->get_num_frames ();
6190 /* Get the function * at DEPTH within the call stack. */
6193 region_model::get_function_at_depth (unsigned depth
) const
6195 stack_region
*stack
= get_root_region ()->get_stack_region (this);
6197 region_id frame_rid
= stack
->get_frame_rid (depth
);
6198 frame_region
*frame
= get_region
<frame_region
> (frame_rid
);
6199 return frame
->get_function ();
6202 /* Get the region_id of this model's globals region (if any). */
6205 region_model::get_globals_region_id () const
6207 return get_root_region ()->get_globals_region_id ();
6210 /* Add SVAL to this model, taking ownership, and returning its new
6214 region_model::add_svalue (svalue
*sval
)
6217 m_svalues
.safe_push (sval
);
6218 return svalue_id::from_int (m_svalues
.length () - 1);
6221 /* Change the meaning of SID to be NEW_SVAL
6222 (e.g. when deferencing an unknown pointer, the pointer
6223 becomes a pointer to a symbolic region, so that all users
6224 of the former unknown pointer are now effectively pointing
6225 at the same region). */
6228 region_model::replace_svalue (svalue_id sid
, svalue
*new_sval
)
6230 gcc_assert (!sid
.null_p ());
6231 int idx
= sid
.as_int ();
6233 gcc_assert (m_svalues
[idx
]);
6234 gcc_assert (m_svalues
[idx
]->get_type () == new_sval
->get_type ());
6235 delete m_svalues
[idx
];
6237 m_svalues
[idx
] = new_sval
;
6240 /* Add region R to this model, taking ownership, and returning its new
6244 region_model::add_region (region
*r
)
6247 m_regions
.safe_push (r
);
6248 return region_id::from_int (m_regions
.length () - 1);
6251 /* Return the svalue with id SVAL_ID, or NULL for a null id. */
6254 region_model::get_svalue (svalue_id sval_id
) const
6256 if (sval_id
.null_p ())
6258 return m_svalues
[sval_id
.as_int ()];
6261 /* Return the region with id RID, or NULL for a null id. */
6264 region_model::get_region (region_id rid
) const
6268 return m_regions
[rid
.as_int ()];
6271 /* Make a region of an appropriate subclass for TYPE,
6272 with parent PARENT_RID, or return NULL for types we don't yet know
6276 make_region_for_type (region_id parent_rid
, tree type
)
6278 gcc_assert (TYPE_P (type
));
6280 if (INTEGRAL_TYPE_P (type
)
6281 || SCALAR_FLOAT_TYPE_P (type
)
6282 || POINTER_TYPE_P (type
)
6283 || TREE_CODE (type
) == COMPLEX_TYPE
6284 || TREE_CODE (type
) == VECTOR_TYPE
)
6285 return new primitive_region (parent_rid
, type
);
6287 if (TREE_CODE (type
) == RECORD_TYPE
)
6288 return new struct_region (parent_rid
, type
);
6290 if (TREE_CODE (type
) == ARRAY_TYPE
)
6291 return new array_region (parent_rid
, type
);
6293 if (TREE_CODE (type
) == UNION_TYPE
)
6294 return new union_region (parent_rid
, type
);
6296 if (FUNC_OR_METHOD_TYPE_P (type
))
6297 return new function_region (parent_rid
, type
);
6299 /* If we have a void *, make a new symbolic region. */
6300 if (VOID_TYPE_P (type
))
6301 return new symbolic_region (parent_rid
, type
, false);
6306 /* Add a region with type TYPE and parent PARENT_RID. */
6309 region_model::add_region_for_type (region_id parent_rid
, tree type
,
6310 region_model_context
*ctxt
)
6312 gcc_assert (TYPE_P (type
));
6314 if (region
*new_region
= make_region_for_type (parent_rid
, type
))
6315 return add_region (new_region
);
6317 /* If we can't handle TYPE, return a placeholder region, and stop
6318 exploring this path. */
6319 return make_region_for_unexpected_tree_code (ctxt
, type
,
6320 dump_location_t ());
6323 /* Helper class for region_model::purge_unused_svalues. */
6325 class restrict_to_used_svalues
: public purge_criteria
6328 restrict_to_used_svalues (const auto_sbitmap
&used
) : m_used (used
) {}
6330 bool should_purge_p (svalue_id sid
) const FINAL OVERRIDE
6332 gcc_assert (!sid
.null_p ());
6333 return !bitmap_bit_p (m_used
, sid
.as_int ());
6337 const auto_sbitmap
&m_used
;
6340 /* Remove unused svalues from this model, accumulating stats into STATS.
6341 Unused svalues are deleted. Doing so could reorder the svalues, and
6342 thus change the meaning of svalue_ids.
6344 If CTXT is non-NULL, then it is notified about svalue_id remappings,
6345 and about svalue_ids that are about to be deleted. This allows e.g.
6346 for warning about resource leaks, for the case where the svalue
6347 represents a resource handle in the user code (e.g. a FILE * or a malloc
6350 Amongst other things, removing unused svalues is important for ensuring
6351 that the analysis of loops terminates. Otherwise, we could generate a
6352 succession of models with unreferenced "unknown" values, where the
6353 number of redundant unknown values could grow without bounds, and each
6354 such model would be treated as distinct.
6356 If KNOWN_USED is non-NULL, treat *KNOWN_USED as used (this is for
6357 handling values being returned from functions as their frame is popped,
6358 since otherwise we'd have to simultaneously determine both the rvalue
6359 of the return expr in the callee frame and the lvalue for the gcall's
6360 assignment in the caller frame, and it seems cleaner to express all
6361 lvalue and rvalue lookups implicitly relative to a "current" frame). */
6364 region_model::purge_unused_svalues (purge_stats
*stats
,
6365 region_model_context
*ctxt
,
6366 svalue_id
*known_used_sid
)
6368 // TODO: might want to avoid a vfunc call just to do logging here:
6369 logger
*logger
= ctxt
? ctxt
->get_logger () : NULL
;
6373 auto_sbitmap
used (m_svalues
.length ());
6374 bitmap_clear (used
);
6377 if (!known_used_sid
->null_p ())
6378 bitmap_set_bit (used
, known_used_sid
->as_int ());
6380 /* Walk the regions, marking sids that are used. */
6383 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6385 svalue_id sid
= r
->get_value_direct ();
6387 bitmap_set_bit (used
, sid
.as_int ());
6390 /* Now purge any constraints involving svalues we don't care about. */
6391 restrict_to_used_svalues
criterion (used
);
6392 m_constraints
->purge (criterion
, stats
);
6394 /* Mark any sids that are in constraints that survived. */
6397 FOR_EACH_VEC_ELT (m_constraints
->m_equiv_classes
, i
, ec
)
6401 FOR_EACH_VEC_ELT (ec
->m_vars
, j
, sid
)
6403 gcc_assert (!sid
->null_p ());
6404 bitmap_set_bit (used
, sid
->as_int ());
6409 /* Build a mapping from old-sid to new-sid so that we can preserve
6410 order of the used IDs and move all redundant ones to the end.
6411 Iterate though svalue IDs, adding used ones to the front of
6412 the new list, and unused ones to the back. */
6413 svalue_id_map
map (m_svalues
.length ());
6414 int next_used_new_sid
= 0;
6415 int after_next_unused_new_sid
= m_svalues
.length ();
6416 for (unsigned i
= 0; i
< m_svalues
.length (); i
++)
6418 svalue_id
src (svalue_id::from_int (i
));
6419 if (bitmap_bit_p (used
, i
))
6422 logger
->log ("sv%i is used", i
);
6423 map
.put (src
, svalue_id::from_int (next_used_new_sid
++));
6428 logger
->log ("sv%i is unused", i
);
6429 map
.put (src
, svalue_id::from_int (--after_next_unused_new_sid
));
6432 /* The two insertion points should have met. */
6433 gcc_assert (next_used_new_sid
== after_next_unused_new_sid
);
6435 /* Now walk the regions and the constraints, remapping sids,
6436 so that all the redundant svalues are at the end. */
6437 remap_svalue_ids (map
);
6441 logger
->start_log_line ();
6442 logger
->log_partial ("map: ");
6443 map
.dump_to_pp (logger
->get_printer ());
6444 logger
->end_log_line ();
6447 /* Notify any client about the remapping and pending deletion.
6448 Potentially this could trigger leak warnings. */
6451 ctxt
->remap_svalue_ids (map
);
6452 int num_client_items_purged
6453 = ctxt
->on_svalue_purge (svalue_id::from_int (next_used_new_sid
), map
);
6455 stats
->m_num_client_items
+= num_client_items_purged
;
6458 /* Drop the redundant svalues from the end of the vector. */
6459 while ((signed)m_svalues
.length () > next_used_new_sid
)
6463 svalue_id victim
= svalue_id::from_int (m_svalues
.length () - 1);
6464 logger
->log ("deleting sv%i (was sv%i)",
6466 map
.get_src_for_dst (victim
).as_int ());
6468 delete m_svalues
.pop ();
6470 stats
->m_num_svalues
++;
6474 map
.update (known_used_sid
);
6479 /* Renumber the svalues within this model according to MAP. */
6482 region_model::remap_svalue_ids (const svalue_id_map
&map
)
6484 /* Update IDs within regions. */
6487 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6488 r
->remap_svalue_ids (map
);
6490 /* Update IDs within ECs within constraints. */
6491 m_constraints
->remap_svalue_ids (map
);
6493 /* Build a reordered svalues vector. */
6494 auto_vec
<svalue
*> new_svalues (m_svalues
.length ());
6495 for (unsigned i
= 0; i
< m_svalues
.length (); i
++)
6497 svalue_id
dst (svalue_id::from_int (i
));
6498 svalue_id src
= map
.get_src_for_dst (dst
);
6499 new_svalues
.quick_push (get_svalue (src
));
6502 /* Copy over the reordered vec to m_svalues. */
6503 m_svalues
.truncate (0);
6504 gcc_assert (m_svalues
.space (new_svalues
.length ()));
6506 FOR_EACH_VEC_ELT (new_svalues
, i
, sval
)
6507 m_svalues
.quick_push (sval
);
6510 /* Renumber the regions within this model according to MAP. */
6513 region_model::remap_region_ids (const region_id_map
&map
)
6515 /* Update IDs within regions. */
6518 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6519 r
->remap_region_ids (map
);
6521 /* Update IDs within svalues. */
6523 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
6524 sval
->remap_region_ids (map
);
6526 /* Build a reordered regions vector. */
6527 auto_vec
<region
*> new_regions (m_regions
.length ());
6528 for (unsigned i
= 0; i
< m_regions
.length (); i
++)
6530 region_id
dst (region_id::from_int (i
));
6531 region_id src
= map
.get_src_for_dst (dst
);
6532 new_regions
.quick_push (get_region (src
));
6535 /* Copy over the reordered vec to m_regions. */
6536 m_regions
.truncate (0);
6537 gcc_assert (m_regions
.space (new_regions
.length ()));
6538 FOR_EACH_VEC_ELT (new_regions
, i
, r
)
6539 m_regions
.quick_push (r
);
6542 /* Delete all regions within SET_TO_PURGE, remapping region IDs for
6543 other regions. It's required that there are no uses of the
6544 regions within the set (or the region IDs will become invalid).
6546 Accumulate stats to STATS. */
6549 region_model::purge_regions (const region_id_set
&set_to_purge
,
6553 /* Build a mapping from old-rid to new-rid so that we can preserve
6554 order of the used IDs and move all redundant ones to the end.
6555 Iterate though region IDs, adding used ones to the front of
6556 the new list, and unused ones to the back. */
6557 region_id_map
map (m_regions
.length ());
6558 int next_used_new_rid
= 0;
6559 int after_next_unused_new_rid
= m_regions
.length ();
6560 for (unsigned i
= 0; i
< m_regions
.length (); i
++)
6562 region_id
src (region_id::from_int (i
));
6563 if (set_to_purge
.region_p (src
))
6564 map
.put (src
, region_id::from_int (--after_next_unused_new_rid
));
6566 map
.put (src
, region_id::from_int (next_used_new_rid
++));
6568 /* The two insertion points should have met. */
6569 gcc_assert (next_used_new_rid
== after_next_unused_new_rid
);
6571 /* Now walk the regions and svalues, remapping rids,
6572 so that all the redundant regions are at the end. */
6573 remap_region_ids (map
);
6575 /* Drop the redundant regions from the end of the vector. */
6576 while ((signed)m_regions
.length () > next_used_new_rid
)
6578 delete m_regions
.pop ();
6580 stats
->m_num_regions
++;
6584 /* Populate *OUT with RID and all of its descendents.
6585 If EXCLUDE_RID is non-null, then don't add it or its descendents. */
6588 region_model::get_descendents (region_id rid
, region_id_set
*out
,
6589 region_id exclude_rid
) const
6591 out
->add_region (rid
);
6593 bool changed
= true;
6599 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6601 region_id iter_rid
= region_id::from_int (i
);
6602 if (iter_rid
== exclude_rid
)
6604 if (!out
->region_p (iter_rid
))
6606 region_id parent_rid
= r
->get_parent ();
6607 if (!parent_rid
.null_p ())
6608 if (out
->region_p (parent_rid
))
6610 out
->add_region (iter_rid
);
6618 /* Delete RID and all descendent regions.
6619 Find any pointers to such regions; convert them to
6620 poisoned values of kind PKIND.
6621 Accumulate stats on purged entities into STATS. */
6624 region_model::delete_region_and_descendents (region_id rid
,
6625 enum poison_kind pkind
,
6629 /* Find all child and descendent regions. */
6630 region_id_set
descendents (this);
6631 get_descendents (rid
, &descendents
, region_id::null ());
6633 /* Find any pointers to such regions; convert to poisoned. */
6634 poison_any_pointers_to_bad_regions (descendents
, pkind
);
6636 /* Delete all such regions. */
6637 purge_regions (descendents
, stats
, logger
);
6640 /* Find any pointers to regions within BAD_REGIONS; convert them to
6641 poisoned values of kind PKIND. */
6644 region_model::poison_any_pointers_to_bad_regions (const region_id_set
&
6646 enum poison_kind pkind
)
6650 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
6651 if (region_svalue
*ptr_sval
= sval
->dyn_cast_region_svalue ())
6653 region_id ptr_dst
= ptr_sval
->get_pointee ();
6654 if (!ptr_dst
.null_p ())
6655 if (bad_regions
.region_p (ptr_dst
))
6657 (svalue_id::from_int (i
),
6658 new poisoned_svalue (pkind
, sval
->get_type ()));
6662 /* Attempt to merge THIS with OTHER_MODEL, writing the result
6663 to OUT_MODEL, and populating SID_MAPPING. */
6666 region_model::can_merge_with_p (const region_model
&other_model
,
6667 region_model
*out_model
,
6668 svalue_id_merger_mapping
*sid_mapping
) const
6670 gcc_assert (m_root_rid
== other_model
.m_root_rid
);
6671 gcc_assert (m_root_rid
.as_int () == 0);
6672 gcc_assert (sid_mapping
);
6673 gcc_assert (out_model
);
6675 model_merger
merger (this, &other_model
, out_model
, sid_mapping
);
6677 if (!root_region::can_merge_p (get_root_region (),
6678 other_model
.get_root_region (),
6679 out_model
->get_root_region (),
6683 /* Merge constraints. */
6684 constraint_manager::merge (*m_constraints
,
6685 *other_model
.m_constraints
,
6686 out_model
->m_constraints
,
6689 out_model
->validate ();
6691 /* The merged model should be simpler (or as simple) as the inputs. */
6693 gcc_assert (out_model
->m_svalues
.length () <= m_svalues
.length ());
6694 gcc_assert (out_model
->m_svalues
.length ()
6695 <= other_model
.m_svalues
.length ());
6697 gcc_assert (out_model
->m_regions
.length () <= m_regions
.length ());
6698 gcc_assert (out_model
->m_regions
.length ()
6699 <= other_model
.m_regions
.length ());
6700 // TODO: same, for constraints
6705 /* As above, but supply a placeholder svalue_id_merger_mapping
6706 instance to be used and receive output. For use in selftests. */
6709 region_model::can_merge_with_p (const region_model
&other_model
,
6710 region_model
*out_model
) const
6712 svalue_id_merger_mapping
sid_mapping (*this, other_model
);
6713 return can_merge_with_p (other_model
, out_model
, &sid_mapping
);
6716 /* For debugging purposes: look for a region within this region_model
6717 for a decl named NAME (or an SSA_NAME for such a decl),
6718 returning its value, or svalue_id::null if none are found. */
6721 region_model::get_value_by_name (const char *name
) const
6724 tree identifier
= get_identifier (name
);
6725 return get_root_region ()->get_value_by_name (identifier
, *this);
6728 /* Generate or reuse an svalue_id within this model for an index
6729 into an array of type PTR_TYPE, based on OFFSET_SID. */
6732 region_model::convert_byte_offset_to_array_index (tree ptr_type
,
6733 svalue_id offset_sid
)
6735 gcc_assert (POINTER_TYPE_P (ptr_type
));
6737 if (tree offset_cst
= maybe_get_constant (offset_sid
))
6739 tree elem_type
= TREE_TYPE (ptr_type
);
6741 /* Arithmetic on void-pointers is a GNU C extension, treating the size
6743 https://gcc.gnu.org/onlinedocs/gcc/Pointer-Arith.html */
6744 if (TREE_CODE (elem_type
) == VOID_TYPE
)
6747 /* First, use int_size_in_bytes, to reject the case where we have an
6748 incomplete type, or a non-constant value. */
6749 HOST_WIDE_INT hwi_byte_size
= int_size_in_bytes (elem_type
);
6750 if (hwi_byte_size
> 0)
6752 /* Now call size_in_bytes to get the answer in tree form. */
6753 tree byte_size
= size_in_bytes (elem_type
);
6754 gcc_assert (byte_size
);
6755 /* Try to get a constant by dividing, ensuring that we're in a
6756 signed representation first. */
6758 = fold_binary (TRUNC_DIV_EXPR
, ssizetype
,
6759 fold_convert (ssizetype
, offset_cst
),
6760 fold_convert (ssizetype
, byte_size
));
6761 if (index
&& TREE_CODE (index
) == INTEGER_CST
)
6762 return get_or_create_constant_svalue (index
);
6766 /* Otherwise, we don't know the array index; generate a new unknown value.
6767 TODO: do we need to capture the relationship between two unknown
6768 values (the offset and the index)? */
6769 return add_svalue (new unknown_svalue (integer_type_node
));
6772 /* Get a region of type TYPE for PTR_SID[OFFSET_SID/sizeof (*PTR_SID)].
6774 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6775 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6776 and then get a view of type TYPE on the relevant array element. */
6779 region_model::get_or_create_mem_ref (tree type
,
6781 svalue_id offset_sid
,
6782 region_model_context
*ctxt
)
6784 svalue
*ptr_sval
= get_svalue (ptr_sid
);
6785 tree ptr_type
= ptr_sval
->get_type ();
6786 gcc_assert (ptr_type
);
6788 region_id raw_rid
= deref_rvalue (ptr_sid
, ctxt
);
6790 svalue
*offset_sval
= get_svalue (offset_sid
);
6791 tree offset_type
= offset_sval
->get_type ();
6792 gcc_assert (offset_type
);
6794 if (constant_svalue
*cst_sval
= offset_sval
->dyn_cast_constant_svalue ())
6796 if (zerop (cst_sval
->get_constant ()))
6798 /* Handle the zero offset case. */
6799 return get_or_create_view (raw_rid
, type
, ctxt
);
6802 /* If we're already within an array of the correct type,
6803 then we want to reuse that array, rather than starting
6805 If so, figure out our raw_rid's offset from its parent,
6806 if we can, and use that to offset OFFSET_SID, and create
6807 the element within the parent region. */
6808 region
*raw_reg
= get_region (raw_rid
);
6809 region_id parent_rid
= raw_reg
->get_parent ();
6810 tree parent_type
= get_region (parent_rid
)->get_type ();
6812 && TREE_CODE (parent_type
) == ARRAY_TYPE
)
6814 // TODO: check we have the correct parent type
6815 array_region
*parent_array
= get_region
<array_region
> (parent_rid
);
6816 array_region::key_t key_for_raw_rid
;
6817 if (parent_array
->get_key_for_child_region (raw_rid
,
6820 /* Convert from offset to index. */
6822 = convert_byte_offset_to_array_index (ptr_type
, offset_sid
);
6824 = get_svalue (index_sid
)->maybe_get_constant ())
6826 array_region::key_t index_offset
6827 = array_region::key_from_constant (index_cst
);
6828 array_region::key_t index_rel_to_parent
6829 = key_for_raw_rid
+ index_offset
;
6830 tree index_rel_to_parent_cst
6831 = wide_int_to_tree (integer_type_node
,
6832 index_rel_to_parent
);
6834 = get_or_create_constant_svalue (index_rel_to_parent_cst
);
6836 /* Carry on, using the parent region and adjusted index. */
6837 region_id element_rid
6838 = parent_array
->get_element (this, raw_rid
, index_sid
,
6840 return get_or_create_view (element_rid
, type
, ctxt
);
6846 tree array_type
= build_array_type (TREE_TYPE (ptr_type
),
6848 region_id array_view_rid
= get_or_create_view (raw_rid
, array_type
, ctxt
);
6849 array_region
*array_reg
= get_region
<array_region
> (array_view_rid
);
6852 = convert_byte_offset_to_array_index (ptr_type
, offset_sid
);
6854 region_id element_rid
6855 = array_reg
->get_element (this, array_view_rid
, index_sid
, ctxt
);
6857 return get_or_create_view (element_rid
, type
, ctxt
);
6860 /* Get a region of type TYPE for PTR_SID + OFFSET_SID.
6862 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6863 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6864 and then get a view of type TYPE on the relevant array element. */
6867 region_model::get_or_create_pointer_plus_expr (tree type
,
6869 svalue_id offset_in_bytes_sid
,
6870 region_model_context
*ctxt
)
6872 return get_or_create_mem_ref (type
,
6874 offset_in_bytes_sid
,
6878 /* Get or create a view of type TYPE of the region with id RAW_ID.
6879 Return the id of the view (or RAW_ID if it of the same type). */
6882 region_model::get_or_create_view (region_id raw_rid
, tree type
,
6883 region_model_context
*ctxt
)
6885 region
*raw_region
= get_region (raw_rid
);
6887 gcc_assert (TYPE_P (type
));
6888 if (type
!= raw_region
->get_type ())
6890 /* If the region already has a view of the requested type,
6892 region_id existing_view_rid
= raw_region
->get_view (type
, this);
6893 if (!existing_view_rid
.null_p ())
6894 return existing_view_rid
;
6896 /* Otherwise, make one (adding it to the region_model and
6897 to the viewed region). */
6898 region_id view_rid
= add_region_for_type (raw_rid
, type
, ctxt
);
6899 raw_region
->add_view (view_rid
, this);
6900 // TODO: something to signify that this is a "view"
6907 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6911 region_model::get_fndecl_for_call (const gcall
*call
,
6912 region_model_context
*ctxt
)
6914 tree fn_ptr
= gimple_call_fn (call
);
6915 if (fn_ptr
== NULL_TREE
)
6917 svalue_id fn_ptr_sid
= get_rvalue (fn_ptr
, ctxt
);
6918 svalue
*fn_ptr_sval
= get_svalue (fn_ptr_sid
);
6919 if (region_svalue
*fn_ptr_ptr
= fn_ptr_sval
->dyn_cast_region_svalue ())
6921 region_id fn_rid
= fn_ptr_ptr
->get_pointee ();
6922 code_region
*code
= get_root_region ()->get_code_region (this);
6925 tree fn_decl
= code
->get_tree_for_child_region (fn_rid
);
6928 cgraph_node
*node
= cgraph_node::get (fn_decl
);
6931 const cgraph_node
*ultimate_node
= node
->ultimate_alias_target ();
6933 return ultimate_node
->decl
;
6940 /* struct model_merger. */
6942 /* Dump a multiline representation of this merger to PP. */
6945 model_merger::dump_to_pp (pretty_printer
*pp
) const
6947 pp_string (pp
, "model A:");
6949 m_model_a
->dump_to_pp (pp
, false);
6952 pp_string (pp
, "model B:");
6954 m_model_b
->dump_to_pp (pp
, false);
6957 pp_string (pp
, "merged model:");
6959 m_merged_model
->dump_to_pp (pp
, false);
6962 pp_string (pp
, "region map: model A to merged model:");
6964 m_map_regions_from_a_to_m
.dump_to_pp (pp
);
6967 pp_string (pp
, "region map: model B to merged model:");
6969 m_map_regions_from_b_to_m
.dump_to_pp (pp
);
6972 m_sid_mapping
->dump_to_pp (pp
);
6975 /* Dump a multiline representation of this merger to FILE. */
6978 model_merger::dump (FILE *fp
) const
6981 pp_format_decoder (&pp
) = default_tree_printer
;
6982 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
6983 pp
.buffer
->stream
= fp
;
6988 /* Dump a multiline representation of this merger to stderr. */
6991 model_merger::dump () const
6996 /* Attempt to merge the svalues of SID_A and SID_B (from their
6997 respective models), writing the id of the resulting svalue
6999 Return true if the merger is possible, false otherwise. */
7002 model_merger::can_merge_values_p (svalue_id sid_a
,
7004 svalue_id
*merged_sid
)
7006 gcc_assert (merged_sid
);
7007 svalue
*sval_a
= m_model_a
->get_svalue (sid_a
);
7008 svalue
*sval_b
= m_model_b
->get_svalue (sid_b
);
7010 /* If both are NULL, then the "values" are trivially mergeable. */
7011 if (!sval_a
&& !sval_b
)
7014 /* If one is NULL and the other non-NULL, then the "values"
7015 are not mergeable. */
7016 if (!(sval_a
&& sval_b
))
7019 /* Have they both already been mapped to the same new svalue_id?
7021 svalue_id sid_a_in_m
7022 = m_sid_mapping
->m_map_from_a_to_m
.get_dst_for_src (sid_a
);
7023 svalue_id sid_b_in_m
7024 = m_sid_mapping
->m_map_from_b_to_m
.get_dst_for_src (sid_b
);
7025 if (!sid_a_in_m
.null_p ()
7026 && !sid_b_in_m
.null_p ()
7027 && sid_a_in_m
== sid_b_in_m
)
7029 *merged_sid
= sid_a_in_m
;
7033 tree type
= sval_a
->get_type ();
7034 if (type
== NULL_TREE
)
7035 type
= sval_b
->get_type ();
7037 /* If the values have different kinds, or are both unknown,
7038 then merge as "unknown". */
7039 if (sval_a
->get_kind () != sval_b
->get_kind ()
7040 || sval_a
->get_kind () == SK_UNKNOWN
)
7042 svalue
*merged_sval
= new unknown_svalue (type
);
7043 *merged_sid
= m_merged_model
->add_svalue (merged_sval
);
7044 record_svalues (sid_a
, sid_b
, *merged_sid
);
7048 gcc_assert (sval_a
->get_kind () == sval_b
->get_kind ());
7050 switch (sval_a
->get_kind ())
7053 case SK_UNKNOWN
: /* SK_UNKNOWN handled above. */
7058 /* If we have two region pointers, then we can merge (possibly to
7060 const region_svalue
®ion_sval_a
= *as_a
<region_svalue
*> (sval_a
);
7061 const region_svalue
®ion_sval_b
= *as_a
<region_svalue
*> (sval_b
);
7062 region_svalue::merge_values (region_sval_a
, region_sval_b
,
7065 record_svalues (sid_a
, sid_b
, *merged_sid
);
7071 /* If we have two constants, then we can merge. */
7072 const constant_svalue
&cst_sval_a
= *as_a
<constant_svalue
*> (sval_a
);
7073 const constant_svalue
&cst_sval_b
= *as_a
<constant_svalue
*> (sval_b
);
7074 constant_svalue::merge_values (cst_sval_a
, cst_sval_b
,
7076 record_svalues (sid_a
, sid_b
, *merged_sid
);
7087 /* Record that A_RID in model A and B_RID in model B
7088 correspond to MERGED_RID in the merged model, so
7089 that pointers can be accurately merged. */
7092 model_merger::record_regions (region_id a_rid
,
7094 region_id merged_rid
)
7096 m_map_regions_from_a_to_m
.put (a_rid
, merged_rid
);
7097 m_map_regions_from_b_to_m
.put (b_rid
, merged_rid
);
7100 /* Record that A_SID in model A and B_SID in model B
7101 correspond to MERGED_SID in the merged model. */
7104 model_merger::record_svalues (svalue_id a_sid
,
7106 svalue_id merged_sid
)
7108 gcc_assert (m_sid_mapping
);
7109 m_sid_mapping
->m_map_from_a_to_m
.put (a_sid
, merged_sid
);
7110 m_sid_mapping
->m_map_from_b_to_m
.put (b_sid
, merged_sid
);
7113 /* struct svalue_id_merger_mapping. */
7115 /* svalue_id_merger_mapping's ctor. */
7117 svalue_id_merger_mapping::svalue_id_merger_mapping (const region_model
&a
,
7118 const region_model
&b
)
7119 : m_map_from_a_to_m (a
.get_num_svalues ()),
7120 m_map_from_b_to_m (b
.get_num_svalues ())
7124 /* Dump a multiline representation of this to PP. */
7127 svalue_id_merger_mapping::dump_to_pp (pretty_printer
*pp
) const
7129 pp_string (pp
, "svalue_id map: model A to merged model:");
7131 m_map_from_a_to_m
.dump_to_pp (pp
);
7134 pp_string (pp
, "svalue_id map: model B to merged model:");
7136 m_map_from_b_to_m
.dump_to_pp (pp
);
7140 /* Dump a multiline representation of this to FILE. */
7143 svalue_id_merger_mapping::dump (FILE *fp
) const
7146 pp_format_decoder (&pp
) = default_tree_printer
;
7147 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
7148 pp
.buffer
->stream
= fp
;
7153 /* Dump a multiline representation of this to stderr. */
7156 svalue_id_merger_mapping::dump () const
7161 /* struct canonicalization. */
7163 /* canonicalization's ctor. */
7165 canonicalization::canonicalization (const region_model
&model
)
7167 m_rid_map (model
.get_num_regions ()),
7168 m_sid_map (model
.get_num_svalues ()),
7174 /* If we've not seen RID yet, assign it a canonicalized region_id,
7175 and walk the region's svalue and then the region. */
7178 canonicalization::walk_rid (region_id rid
)
7180 /* Stop if we've already seen RID. */
7181 if (!m_rid_map
.get_dst_for_src (rid
).null_p ())
7184 region
*region
= m_model
.get_region (rid
);
7187 m_rid_map
.put (rid
, region_id::from_int (m_next_rid_int
++));
7188 walk_sid (region
->get_value_direct ());
7189 region
->walk_for_canonicalization (this);
7193 /* If we've not seen SID yet, assign it a canonicalized svalue_id,
7194 and walk the svalue (and potentially regions e.g. for ptr values). */
7197 canonicalization::walk_sid (svalue_id sid
)
7199 /* Stop if we've already seen SID. */
7200 if (!m_sid_map
.get_dst_for_src (sid
).null_p ())
7203 svalue
*sval
= m_model
.get_svalue (sid
);
7206 m_sid_map
.put (sid
, svalue_id::from_int (m_next_sid_int
++));
7207 /* Potentially walk regions e.g. for ptrs. */
7208 sval
->walk_for_canonicalization (this);
7212 /* Dump a multiline representation of this to PP. */
7215 canonicalization::dump_to_pp (pretty_printer
*pp
) const
7217 pp_string (pp
, "region_id map:");
7219 m_rid_map
.dump_to_pp (pp
);
7222 pp_string (pp
, "svalue_id map:");
7224 m_sid_map
.dump_to_pp (pp
);
7228 /* Dump a multiline representation of this to FILE. */
7231 canonicalization::dump (FILE *fp
) const
7234 pp_format_decoder (&pp
) = default_tree_printer
;
7235 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
7236 pp
.buffer
->stream
= fp
;
7241 /* Dump a multiline representation of this to stderr. */
7244 canonicalization::dump () const
7251 /* Update HSTATE with a hash of SID. */
7254 inchash::add (svalue_id sid
, inchash::hash
&hstate
)
7256 hstate
.add_int (sid
.as_int ());
7259 /* Update HSTATE with a hash of RID. */
7262 inchash::add (region_id rid
, inchash::hash
&hstate
)
7264 hstate
.add_int (rid
.as_int ());
7267 /* Dump RMODEL fully to stderr (i.e. without summarization). */
7270 debug (const region_model
&rmodel
)
7272 rmodel
.dump (false);
7279 namespace selftest
{
7281 /* Build a constant tree of the given type from STR. */
7284 build_real_cst_from_string (tree type
, const char *str
)
7286 REAL_VALUE_TYPE real
;
7287 real_from_string (&real
, str
);
7288 return build_real (type
, real
);
7291 /* Append various "interesting" constants to OUT (e.g. NaN). */
7294 append_interesting_constants (auto_vec
<tree
> *out
)
7296 out
->safe_push (build_int_cst (integer_type_node
, 0));
7297 out
->safe_push (build_int_cst (integer_type_node
, 42));
7298 out
->safe_push (build_int_cst (unsigned_type_node
, 0));
7299 out
->safe_push (build_int_cst (unsigned_type_node
, 42));
7300 out
->safe_push (build_real_cst_from_string (float_type_node
, "QNaN"));
7301 out
->safe_push (build_real_cst_from_string (float_type_node
, "-QNaN"));
7302 out
->safe_push (build_real_cst_from_string (float_type_node
, "SNaN"));
7303 out
->safe_push (build_real_cst_from_string (float_type_node
, "-SNaN"));
7304 out
->safe_push (build_real_cst_from_string (float_type_node
, "0.0"));
7305 out
->safe_push (build_real_cst_from_string (float_type_node
, "-0.0"));
7306 out
->safe_push (build_real_cst_from_string (float_type_node
, "Inf"));
7307 out
->safe_push (build_real_cst_from_string (float_type_node
, "-Inf"));
7310 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
7311 if the underlying constants aren't comparable. */
7314 test_tree_cmp_on_constants ()
7316 auto_vec
<tree
> csts
;
7317 append_interesting_constants (&csts
);
7319 /* Try sorting every triple. */
7320 const unsigned num
= csts
.length ();
7321 for (unsigned i
= 0; i
< num
; i
++)
7322 for (unsigned j
= 0; j
< num
; j
++)
7323 for (unsigned k
= 0; k
< num
; k
++)
7325 auto_vec
<tree
> v (3);
7326 v
.quick_push (csts
[i
]);
7327 v
.quick_push (csts
[j
]);
7328 v
.quick_push (csts
[k
]);
7333 /* Implementation detail of the ASSERT_CONDITION_* macros. */
7336 assert_condition (const location
&loc
,
7337 region_model
&model
,
7338 tree lhs
, tree_code op
, tree rhs
,
7341 tristate actual
= model
.eval_condition (lhs
, op
, rhs
, NULL
);
7342 ASSERT_EQ_AT (loc
, actual
, expected
);
7345 /* Implementation detail of ASSERT_DUMP_TREE_EQ. */
7348 assert_dump_tree_eq (const location
&loc
, tree t
, const char *expected
)
7350 auto_fix_quotes sentinel
;
7352 pp_format_decoder (&pp
) = default_tree_printer
;
7354 ASSERT_STREQ_AT (loc
, pp_formatted_text (&pp
), expected
);
7357 /* Assert that dump_tree (T) is EXPECTED. */
7359 #define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
7360 SELFTEST_BEGIN_STMT \
7361 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
7364 /* Implementation detail of ASSERT_DUMP_EQ. */
7367 assert_dump_eq (const location
&loc
,
7368 const region_model
&model
,
7370 const char *expected
)
7372 auto_fix_quotes sentinel
;
7374 pp_format_decoder (&pp
) = default_tree_printer
;
7375 model
.dump_to_pp (&pp
, summarize
);
7376 ASSERT_STREQ_AT (loc
, pp_formatted_text (&pp
), expected
);
7379 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7381 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7382 SELFTEST_BEGIN_STMT \
7383 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7386 /* Smoketest for region_model::dump_to_pp. */
7392 model
.get_root_region ()->ensure_stack_region (&model
);
7393 model
.get_root_region ()->ensure_globals_region (&model
);
7394 model
.get_root_region ()->ensure_heap_region (&model
);
7396 ASSERT_DUMP_EQ (model
, false,
7397 "r0: {kind: `root', parent: null, sval: null}\n"
7398 "|-stack: r1: {kind: `stack', parent: r0, sval: sv0}\n"
7399 "| |: sval: sv0: {poisoned: uninit}\n"
7400 "|-globals: r2: {kind: `globals', parent: r0, sval: null, map: {}}\n"
7401 "`-heap: r3: {kind: `heap', parent: r0, sval: sv1}\n"
7402 " |: sval: sv1: {poisoned: uninit}\n"
7404 " sv0: {poisoned: uninit}\n"
7405 " sv1: {poisoned: uninit}\n"
7406 "constraint manager:\n"
7409 ASSERT_DUMP_EQ (model
, true, "");
7412 /* Helper function for selftests. Create a struct or union type named NAME,
7413 with the fields given by the FIELD_DECLS in FIELDS.
7414 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
7415 create a UNION_TYPE. */
7418 make_test_compound_type (const char *name
, bool is_struct
,
7419 const auto_vec
<tree
> *fields
)
7421 tree t
= make_node (is_struct
? RECORD_TYPE
: UNION_TYPE
);
7422 TYPE_NAME (t
) = get_identifier (name
);
7425 tree fieldlist
= NULL
;
7428 FOR_EACH_VEC_ELT (*fields
, i
, field
)
7430 gcc_assert (TREE_CODE (field
) == FIELD_DECL
);
7431 DECL_CONTEXT (field
) = t
;
7432 fieldlist
= chainon (field
, fieldlist
);
7434 fieldlist
= nreverse (fieldlist
);
7435 TYPE_FIELDS (t
) = fieldlist
;
7441 /* Verify that dumps can show struct fields. */
7446 auto_vec
<tree
> fields
;
7447 tree x_field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
,
7448 get_identifier ("x"), integer_type_node
);
7449 fields
.safe_push (x_field
);
7450 tree y_field
= build_decl (UNKNOWN_LOCATION
, FIELD_DECL
,
7451 get_identifier ("y"), integer_type_node
);
7452 fields
.safe_push (y_field
);
7453 tree coord_type
= make_test_compound_type ("coord", true, &fields
);
7455 tree c
= build_global_decl ("c", coord_type
);
7456 tree c_x
= build3 (COMPONENT_REF
, TREE_TYPE (x_field
),
7457 c
, x_field
, NULL_TREE
);
7458 tree c_y
= build3 (COMPONENT_REF
, TREE_TYPE (y_field
),
7459 c
, y_field
, NULL_TREE
);
7461 tree int_17
= build_int_cst (integer_type_node
, 17);
7462 tree int_m3
= build_int_cst (integer_type_node
, -3);
7465 model
.set_value (c_x
, int_17
, NULL
);
7466 model
.set_value (c_y
, int_m3
, NULL
);
7468 /* Simplified dump. */
7469 ASSERT_DUMP_EQ (model
, true, "c.x: 17, c.y: -3");
7474 "r0: {kind: `root', parent: null, sval: null}\n"
7475 "`-globals: r1: {kind: `globals', parent: r0, sval: null, map: {`c': r2}}\n"
7476 " `-`c': r2: {kind: `struct', parent: r1, sval: null, type: `struct coord', map: {`x': r3, `y': r4}}\n"
7477 " |: type: `struct coord'\n"
7478 " |-`x': r3: {kind: `primitive', parent: r2, sval: sv0, type: `int'}\n"
7479 " | |: sval: sv0: {type: `int', `17'}\n"
7480 " | |: type: `int'\n"
7481 " `-`y': r4: {kind: `primitive', parent: r2, sval: sv1, type: `int'}\n"
7482 " |: sval: sv1: {type: `int', `-3'}\n"
7485 " sv0: {type: `int', `17'}\n"
7486 " sv1: {type: `int', `-3'}\n"
7487 "constraint manager:\n"
7492 /* Verify that dumps can show array elements. */
7497 tree tlen
= size_int (10);
7498 tree arr_type
= build_array_type (char_type_node
, build_index_type (tlen
));
7500 tree a
= build_global_decl ("a", arr_type
);
7503 tree int_0
= build_int_cst (integer_type_node
, 0);
7504 tree a_0
= build4 (ARRAY_REF
, char_type_node
,
7505 a
, int_0
, NULL_TREE
, NULL_TREE
);
7506 tree char_A
= build_int_cst (char_type_node
, 'A');
7507 model
.set_value (a_0
, char_A
, NULL
);
7509 /* Simplified dump. */
7510 ASSERT_DUMP_EQ (model
, true, "a[0]: 65");
7515 "r0: {kind: `root', parent: null, sval: null}\n"
7516 "`-globals: r1: {kind: `globals', parent: r0, sval: null, map: {`a': r2}}\n"
7517 " `-`a': r2: {kind: `array', parent: r1, sval: null, type: `char[11]', array: {[0]: r3}}\n"
7518 " |: type: `char[11]'\n"
7519 " `-[0]: r3: {kind: `primitive', parent: r2, sval: sv1, type: `char'}\n"
7520 " |: sval: sv1: {type: `char', `65'}\n"
7521 " |: type: `char'\n"
7523 " sv0: {type: `int', `0'}\n"
7524 " sv1: {type: `char', `65'}\n"
7525 "constraint manager:\n"
7530 /* Verify that region_model::get_representative_tree works as expected. */
7533 test_get_representative_tree ()
7537 tree string_cst
= build_string (4, "foo");
7539 svalue_id str_sid
= m
.get_rvalue (string_cst
, NULL
);
7540 tree rep
= m
.get_representative_tree (str_sid
);
7541 ASSERT_EQ (rep
, string_cst
);
7544 /* String literal. */
7546 tree string_cst_ptr
= build_string_literal (4, "foo");
7548 svalue_id str_sid
= m
.get_rvalue (string_cst_ptr
, NULL
);
7549 tree rep
= m
.get_representative_tree (str_sid
);
7550 ASSERT_DUMP_TREE_EQ (rep
, "&\"foo\"[0]");
7554 /* Verify that calling region_model::get_rvalue repeatedly on the same
7555 tree constant retrieves the same svalue_id. */
7558 test_unique_constants ()
7560 tree int_0
= build_int_cst (integer_type_node
, 0);
7561 tree int_42
= build_int_cst (integer_type_node
, 42);
7563 test_region_model_context ctxt
;
7565 ASSERT_EQ (model
.get_rvalue (int_0
, &ctxt
), model
.get_rvalue (int_0
, &ctxt
));
7566 ASSERT_EQ (model
.get_rvalue (int_42
, &ctxt
),
7567 model
.get_rvalue (int_42
, &ctxt
));
7568 ASSERT_NE (model
.get_rvalue (int_0
, &ctxt
), model
.get_rvalue (int_42
, &ctxt
));
7569 ASSERT_EQ (ctxt
.get_num_diagnostics (), 0);
7572 /* Check that operator== and hashing works as expected for the
7573 various svalue subclasses. */
7576 test_svalue_equality ()
7578 tree int_42
= build_int_cst (integer_type_node
, 42);
7579 tree int_0
= build_int_cst (integer_type_node
, 0);
7581 /* Create pairs instances of the various subclasses of svalue,
7582 testing for hash and equality between (this, this) and
7583 (this, other of same subclass). */
7585 = new region_svalue (ptr_type_node
, region_id::from_int (0));
7587 = new region_svalue (ptr_type_node
, region_id::from_int (1));
7589 ASSERT_EQ (ptr_to_r0
->hash (), ptr_to_r0
->hash ());
7590 ASSERT_EQ (*ptr_to_r0
, *ptr_to_r0
);
7592 ASSERT_NE (ptr_to_r0
->hash (), ptr_to_r1
->hash ());
7593 ASSERT_NE (*ptr_to_r0
, *ptr_to_r1
);
7595 svalue
*cst_int_42
= new constant_svalue (int_42
);
7596 svalue
*cst_int_0
= new constant_svalue (int_0
);
7598 ASSERT_EQ (cst_int_42
->hash (), cst_int_42
->hash ());
7599 ASSERT_EQ (*cst_int_42
, *cst_int_42
);
7601 ASSERT_NE (cst_int_42
->hash (), cst_int_0
->hash ());
7602 ASSERT_NE (*cst_int_42
, *cst_int_0
);
7604 svalue
*uninit
= new poisoned_svalue (POISON_KIND_UNINIT
, NULL_TREE
);
7605 svalue
*freed
= new poisoned_svalue (POISON_KIND_FREED
, NULL_TREE
);
7607 ASSERT_EQ (uninit
->hash (), uninit
->hash ());
7608 ASSERT_EQ (*uninit
, *uninit
);
7610 ASSERT_NE (uninit
->hash (), freed
->hash ());
7611 ASSERT_NE (*uninit
, *freed
);
7613 svalue
*unknown_0
= new unknown_svalue (ptr_type_node
);
7614 svalue
*unknown_1
= new unknown_svalue (ptr_type_node
);
7615 ASSERT_EQ (unknown_0
->hash (), unknown_0
->hash ());
7616 ASSERT_EQ (*unknown_0
, *unknown_0
);
7617 ASSERT_EQ (*unknown_1
, *unknown_1
);
7619 /* Comparisons between different kinds of svalue. */
7620 ASSERT_NE (*ptr_to_r0
, *cst_int_42
);
7621 ASSERT_NE (*ptr_to_r0
, *uninit
);
7622 ASSERT_NE (*ptr_to_r0
, *unknown_0
);
7623 ASSERT_NE (*cst_int_42
, *ptr_to_r0
);
7624 ASSERT_NE (*cst_int_42
, *uninit
);
7625 ASSERT_NE (*cst_int_42
, *unknown_0
);
7626 ASSERT_NE (*uninit
, *ptr_to_r0
);
7627 ASSERT_NE (*uninit
, *cst_int_42
);
7628 ASSERT_NE (*uninit
, *unknown_0
);
7629 ASSERT_NE (*unknown_0
, *ptr_to_r0
);
7630 ASSERT_NE (*unknown_0
, *cst_int_42
);
7631 ASSERT_NE (*unknown_0
, *uninit
);
7643 /* Check that operator== and hashing works as expected for the
7644 various region subclasses. */
7647 test_region_equality ()
7650 = new primitive_region (region_id::from_int (3), integer_type_node
);
7652 = new primitive_region (region_id::from_int (4), integer_type_node
);
7654 ASSERT_EQ (*r0
, *r0
);
7655 ASSERT_EQ (r0
->hash (), r0
->hash ());
7656 ASSERT_NE (*r0
, *r1
);
7657 ASSERT_NE (r0
->hash (), r1
->hash ());
7662 // TODO: test coverage for the map within a map_region
7665 /* A subclass of purge_criteria for selftests: purge all svalue_id instances. */
7667 class purge_all_svalue_ids
: public purge_criteria
7670 bool should_purge_p (svalue_id
) const FINAL OVERRIDE
7676 /* A subclass of purge_criteria: purge a specific svalue_id. */
7678 class purge_one_svalue_id
: public purge_criteria
7681 purge_one_svalue_id (svalue_id victim
) : m_victim (victim
) {}
7683 purge_one_svalue_id (region_model model
, tree expr
)
7684 : m_victim (model
.get_rvalue (expr
, NULL
)) {}
7686 bool should_purge_p (svalue_id sid
) const FINAL OVERRIDE
7688 return sid
== m_victim
;
7695 /* Check that constraint_manager::purge works for individual svalue_ids. */
7698 test_purging_by_criteria ()
7700 tree int_42
= build_int_cst (integer_type_node
, 42);
7701 tree int_0
= build_int_cst (integer_type_node
, 0);
7703 tree x
= build_global_decl ("x", integer_type_node
);
7704 tree y
= build_global_decl ("y", integer_type_node
);
7707 region_model model0
;
7708 region_model model1
;
7710 ADD_SAT_CONSTRAINT (model1
, x
, EQ_EXPR
, y
);
7711 ASSERT_NE (model0
, model1
);
7713 purge_stats stats_for_px
;
7714 purge_one_svalue_id
px (model1
, x
);
7715 model1
.get_constraints ()->purge (px
, &stats_for_px
);
7716 ASSERT_EQ (stats_for_px
.m_num_equiv_classes
, 0);
7718 purge_stats stats_for_py
;
7719 purge_one_svalue_id
py (model1
.get_rvalue (y
, NULL
));
7720 model1
.get_constraints ()->purge (py
, &stats_for_py
);
7721 ASSERT_EQ (stats_for_py
.m_num_equiv_classes
, 1);
7723 ASSERT_EQ (*model0
.get_constraints (), *model1
.get_constraints ());
7727 region_model model0
;
7728 region_model model1
;
7730 ADD_SAT_CONSTRAINT (model1
, x
, EQ_EXPR
, int_42
);
7731 ASSERT_NE (model0
, model1
);
7732 ASSERT_CONDITION_TRUE (model1
, x
, EQ_EXPR
, int_42
);
7735 model1
.get_constraints ()->purge (purge_one_svalue_id (model1
, x
), &stats
);
7737 ASSERT_CONDITION_UNKNOWN (model1
, x
, EQ_EXPR
, int_42
);
7741 region_model model0
;
7742 region_model model1
;
7744 ADD_SAT_CONSTRAINT (model1
, x
, GE_EXPR
, int_0
);
7745 ADD_SAT_CONSTRAINT (model1
, x
, LE_EXPR
, int_42
);
7746 ASSERT_NE (model0
, model1
);
7748 ASSERT_CONDITION_TRUE (model1
, x
, GE_EXPR
, int_0
);
7749 ASSERT_CONDITION_TRUE (model1
, x
, LE_EXPR
, int_42
);
7752 model1
.get_constraints ()->purge (purge_one_svalue_id (model1
, x
), &stats
);
7754 ASSERT_CONDITION_UNKNOWN (model1
, x
, GE_EXPR
, int_0
);
7755 ASSERT_CONDITION_UNKNOWN (model1
, x
, LE_EXPR
, int_42
);
7759 region_model model0
;
7760 region_model model1
;
7762 ADD_SAT_CONSTRAINT (model1
, x
, NE_EXPR
, int_42
);
7763 ADD_SAT_CONSTRAINT (model1
, y
, NE_EXPR
, int_0
);
7764 ASSERT_NE (model0
, model1
);
7765 ASSERT_CONDITION_TRUE (model1
, x
, NE_EXPR
, int_42
);
7766 ASSERT_CONDITION_TRUE (model1
, y
, NE_EXPR
, int_0
);
7769 model1
.get_constraints ()->purge (purge_one_svalue_id (model1
, x
), &stats
);
7770 ASSERT_NE (model0
, model1
);
7772 ASSERT_CONDITION_UNKNOWN (model1
, x
, NE_EXPR
, int_42
);
7773 ASSERT_CONDITION_TRUE (model1
, y
, NE_EXPR
, int_0
);
7777 region_model model0
;
7778 region_model model1
;
7780 ADD_SAT_CONSTRAINT (model1
, x
, NE_EXPR
, int_42
);
7781 ADD_SAT_CONSTRAINT (model1
, y
, NE_EXPR
, int_0
);
7782 ASSERT_NE (model0
, model1
);
7783 ASSERT_CONDITION_TRUE (model1
, x
, NE_EXPR
, int_42
);
7784 ASSERT_CONDITION_TRUE (model1
, y
, NE_EXPR
, int_0
);
7787 model1
.get_constraints ()->purge (purge_all_svalue_ids (), &stats
);
7788 ASSERT_CONDITION_UNKNOWN (model1
, x
, NE_EXPR
, int_42
);
7789 ASSERT_CONDITION_UNKNOWN (model1
, y
, NE_EXPR
, int_0
);
7794 /* Test that region_model::purge_unused_svalues works as expected. */
7797 test_purge_unused_svalues ()
7799 tree int_42
= build_int_cst (integer_type_node
, 42);
7800 tree int_0
= build_int_cst (integer_type_node
, 0);
7801 tree x
= build_global_decl ("x", integer_type_node
);
7802 tree y
= build_global_decl ("y", integer_type_node
);
7804 test_region_model_context ctxt
;
7806 model
.set_to_new_unknown_value (model
.get_lvalue (x
, &ctxt
), TREE_TYPE (x
),
7808 model
.set_to_new_unknown_value (model
.get_lvalue (x
, &ctxt
), TREE_TYPE (x
),
7810 model
.set_to_new_unknown_value (model
.get_lvalue (x
, &ctxt
), TREE_TYPE (x
),
7812 model
.add_constraint (x
, NE_EXPR
, int_42
, &ctxt
);
7814 model
.set_value (model
.get_lvalue (x
, &ctxt
),
7815 model
.get_rvalue (int_42
, &ctxt
),
7817 model
.add_constraint (y
, GT_EXPR
, int_0
, &ctxt
);
7819 /* The redundant unknown values should have been purged. */
7821 model
.purge_unused_svalues (&purged
, NULL
);
7822 ASSERT_EQ (purged
.m_num_svalues
, 3);
7824 /* and the redundant constraint on an old, unknown value for x should
7825 have been purged. */
7826 ASSERT_EQ (purged
.m_num_equiv_classes
, 1);
7827 ASSERT_EQ (purged
.m_num_constraints
, 1);
7828 ASSERT_EQ (model
.get_constraints ()->m_constraints
.length (), 2);
7830 /* ...but we should still have x == 42. */
7831 ASSERT_EQ (model
.eval_condition (x
, EQ_EXPR
, int_42
, &ctxt
),
7834 /* ...and we should still have the constraint on y. */
7835 ASSERT_EQ (model
.eval_condition (y
, GT_EXPR
, int_0
, &ctxt
),
7838 ASSERT_EQ (ctxt
.get_num_diagnostics (), 0);
7841 /* Verify that simple assignments work as expected. */
7846 tree int_0
= build_int_cst (integer_type_node
, 0);
7847 tree x
= build_global_decl ("x", integer_type_node
);
7848 tree y
= build_global_decl ("y", integer_type_node
);
7850 /* "x == 0", then use of y, then "y = 0;". */
7852 ADD_SAT_CONSTRAINT (model
, x
, EQ_EXPR
, int_0
);
7853 ASSERT_CONDITION_UNKNOWN (model
, y
, EQ_EXPR
, int_0
);
7854 model
.set_value (model
.get_lvalue (y
, NULL
),
7855 model
.get_rvalue (int_0
, NULL
),
7857 ASSERT_CONDITION_TRUE (model
, y
, EQ_EXPR
, int_0
);
7858 ASSERT_CONDITION_TRUE (model
, y
, EQ_EXPR
, x
);
7860 ASSERT_DUMP_EQ (model
, true, "y: 0, {x}: unknown, x == y");
7863 /* Verify the details of pushing and popping stack frames. */
7866 test_stack_frames ()
7868 tree int_42
= build_int_cst (integer_type_node
, 42);
7869 tree int_10
= build_int_cst (integer_type_node
, 10);
7870 tree int_5
= build_int_cst (integer_type_node
, 5);
7871 tree int_0
= build_int_cst (integer_type_node
, 0);
7873 auto_vec
<tree
> param_types
;
7874 tree parent_fndecl
= make_fndecl (integer_type_node
,
7877 allocate_struct_function (parent_fndecl
, true);
7879 tree child_fndecl
= make_fndecl (integer_type_node
,
7882 allocate_struct_function (child_fndecl
, true);
7884 /* "a" and "b" in the parent frame. */
7885 tree a
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7886 get_identifier ("a"),
7888 tree b
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7889 get_identifier ("b"),
7891 /* "x" and "y" in a child frame. */
7892 tree x
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7893 get_identifier ("x"),
7895 tree y
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7896 get_identifier ("y"),
7900 tree p
= build_global_decl ("p", ptr_type_node
);
7903 tree q
= build_global_decl ("q", ptr_type_node
);
7905 test_region_model_context ctxt
;
7908 /* Push stack frame for "parent_fn". */
7909 region_id parent_frame_rid
7910 = model
.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl
), NULL
, &ctxt
);
7911 ASSERT_EQ (model
.get_current_frame_id (), parent_frame_rid
);
7912 region_id a_in_parent_rid
= model
.get_lvalue (a
, &ctxt
);
7913 model
.set_value (a_in_parent_rid
, model
.get_rvalue (int_42
, &ctxt
), &ctxt
);
7914 model
.set_to_new_unknown_value (model
.get_lvalue (b
, &ctxt
),
7915 integer_type_node
, &ctxt
);
7916 model
.add_constraint (b
, LT_EXPR
, int_10
, &ctxt
);
7917 ASSERT_EQ (model
.eval_condition (b
, LT_EXPR
, int_10
, &ctxt
),
7918 tristate (tristate::TS_TRUE
));
7920 /* Push stack frame for "child_fn". */
7921 region_id child_frame_rid
7922 = model
.push_frame (DECL_STRUCT_FUNCTION (child_fndecl
), NULL
, &ctxt
);
7923 ASSERT_EQ (model
.get_current_frame_id (), child_frame_rid
);
7924 region_id x_in_child_rid
= model
.get_lvalue (x
, &ctxt
);
7925 model
.set_value (x_in_child_rid
, model
.get_rvalue (int_0
, &ctxt
), &ctxt
);
7926 model
.set_to_new_unknown_value (model
.get_lvalue (y
, &ctxt
),
7927 integer_type_node
, &ctxt
);
7928 model
.add_constraint (y
, NE_EXPR
, int_5
, &ctxt
);
7929 ASSERT_EQ (model
.eval_condition (y
, NE_EXPR
, int_5
, &ctxt
),
7930 tristate (tristate::TS_TRUE
));
7932 /* Point a global pointer at a local in the child frame: p = &x. */
7933 region_id p_in_globals_rid
= model
.get_lvalue (p
, &ctxt
);
7934 model
.set_value (p_in_globals_rid
,
7935 model
.get_or_create_ptr_svalue (ptr_type_node
,
7939 /* Point another global pointer at p: q = &p. */
7940 region_id q_in_globals_rid
= model
.get_lvalue (q
, &ctxt
);
7941 model
.set_value (q_in_globals_rid
,
7942 model
.get_or_create_ptr_svalue (ptr_type_node
,
7946 /* Test get_descendents. */
7947 region_id_set
descendents (&model
);
7948 model
.get_descendents (child_frame_rid
, &descendents
, region_id::null ());
7949 ASSERT_TRUE (descendents
.region_p (child_frame_rid
));
7950 ASSERT_TRUE (descendents
.region_p (x_in_child_rid
));
7951 ASSERT_FALSE (descendents
.region_p (a_in_parent_rid
));
7952 ASSERT_EQ (descendents
.num_regions (), 3);
7954 auto_vec
<region_id
> test_vec
;
7955 for (region_id_set::iterator_t iter
= descendents
.begin ();
7956 iter
!= descendents
.end ();
7958 test_vec
.safe_push (*iter
);
7959 gcc_unreachable (); // TODO
7963 ASSERT_DUMP_EQ (model
, true,
7964 "a: 42, x: 0, p: &x, q: &p, {b, y}: unknown, b < 10, y != 5");
7966 /* Pop the "child_fn" frame from the stack. */
7968 model
.pop_frame (true, &purged
, &ctxt
);
7970 /* We should have purged the unknown values for x and y. */
7971 ASSERT_EQ (purged
.m_num_svalues
, 2);
7973 /* We should have purged the frame region and the regions for x and y. */
7974 ASSERT_EQ (purged
.m_num_regions
, 3);
7976 /* We should have purged the constraint on y. */
7977 ASSERT_EQ (purged
.m_num_equiv_classes
, 1);
7978 ASSERT_EQ (purged
.m_num_constraints
, 1);
7980 /* Verify that p (which was pointing at the local "x" in the popped
7981 frame) has been poisoned. */
7982 svalue
*new_p_sval
= model
.get_svalue (model
.get_rvalue (p
, &ctxt
));
7983 ASSERT_EQ (new_p_sval
->get_kind (), SK_POISONED
);
7984 ASSERT_EQ (new_p_sval
->dyn_cast_poisoned_svalue ()->get_poison_kind (),
7985 POISON_KIND_POPPED_STACK
);
7987 /* Verify that q still points to p, in spite of the region
7989 svalue
*new_q_sval
= model
.get_svalue (model
.get_rvalue (q
, &ctxt
));
7990 ASSERT_EQ (new_q_sval
->get_kind (), SK_REGION
);
7991 ASSERT_EQ (new_q_sval
->dyn_cast_region_svalue ()->get_pointee (),
7992 model
.get_lvalue (p
, &ctxt
));
7994 /* Verify that top of stack has been updated. */
7995 ASSERT_EQ (model
.get_current_frame_id (), parent_frame_rid
);
7997 /* Verify locals in parent frame. */
7998 /* Verify "a" still has its value. */
7999 svalue
*new_a_sval
= model
.get_svalue (model
.get_rvalue (a
, &ctxt
));
8000 ASSERT_EQ (new_a_sval
->get_kind (), SK_CONSTANT
);
8001 ASSERT_EQ (new_a_sval
->dyn_cast_constant_svalue ()->get_constant (),
8003 /* Verify "b" still has its constraint. */
8004 ASSERT_EQ (model
.eval_condition (b
, LT_EXPR
, int_10
, &ctxt
),
8005 tristate (tristate::TS_TRUE
));
8008 /* Verify that get_representative_path_var works as expected, that
8009 we can map from region ids to parms and back within a recursive call
8013 test_get_representative_path_var ()
8015 auto_vec
<tree
> param_types
;
8016 tree fndecl
= make_fndecl (integer_type_node
,
8019 allocate_struct_function (fndecl
, true);
8022 tree n
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
8023 get_identifier ("n"),
8028 /* Push 5 stack frames for "factorial", each with a param */
8029 auto_vec
<region_id
> parm_rids
;
8030 auto_vec
<svalue_id
> parm_sids
;
8031 for (int depth
= 0; depth
< 5; depth
++)
8034 = model
.push_frame (DECL_STRUCT_FUNCTION (fndecl
), NULL
, NULL
);
8035 region_id rid_n
= model
.get_lvalue (path_var (n
, depth
), NULL
);
8036 parm_rids
.safe_push (rid_n
);
8038 ASSERT_EQ (model
.get_region (rid_n
)->get_parent (), frame_rid
);
8041 = model
.set_to_new_unknown_value (rid_n
, integer_type_node
, NULL
);
8042 parm_sids
.safe_push (sid_n
);
8045 /* Verify that we can recognize that the regions are the parms,
8047 for (int depth
= 0; depth
< 5; depth
++)
8049 ASSERT_EQ (model
.get_representative_path_var (parm_rids
[depth
]),
8050 path_var (n
, depth
));
8051 /* ...and that we can lookup lvalues for locals for all frames,
8052 not just the top. */
8053 ASSERT_EQ (model
.get_lvalue (path_var (n
, depth
), NULL
),
8055 /* ...and that we can locate the svalues. */
8056 auto_vec
<path_var
> pvs
;
8057 model
.get_path_vars_for_svalue (parm_sids
[depth
], &pvs
);
8058 ASSERT_EQ (pvs
.length (), 1);
8059 ASSERT_EQ (pvs
[0], path_var (n
, depth
));
8063 /* Verify that the core regions within a region_model are in a consistent
8064 order after canonicalization. */
8067 test_canonicalization_1 ()
8069 region_model model0
;
8070 model0
.get_root_region ()->ensure_stack_region (&model0
);
8071 model0
.get_root_region ()->ensure_globals_region (&model0
);
8073 region_model model1
;
8074 model1
.get_root_region ()->ensure_globals_region (&model1
);
8075 model1
.get_root_region ()->ensure_stack_region (&model1
);
8077 model0
.canonicalize (NULL
);
8078 model1
.canonicalize (NULL
);
8079 ASSERT_EQ (model0
, model1
);
8082 /* Verify that region models for
8086 are equal after canonicalization. */
8089 test_canonicalization_2 ()
8091 tree int_42
= build_int_cst (integer_type_node
, 42);
8092 tree int_113
= build_int_cst (integer_type_node
, 113);
8093 tree x
= build_global_decl ("x", integer_type_node
);
8094 tree y
= build_global_decl ("y", integer_type_node
);
8096 region_model model0
;
8097 model0
.set_value (model0
.get_lvalue (x
, NULL
),
8098 model0
.get_rvalue (int_42
, NULL
),
8100 model0
.set_value (model0
.get_lvalue (y
, NULL
),
8101 model0
.get_rvalue (int_113
, NULL
),
8104 region_model model1
;
8105 model1
.set_value (model1
.get_lvalue (y
, NULL
),
8106 model1
.get_rvalue (int_113
, NULL
),
8108 model1
.set_value (model1
.get_lvalue (x
, NULL
),
8109 model1
.get_rvalue (int_42
, NULL
),
8112 model0
.canonicalize (NULL
);
8113 model1
.canonicalize (NULL
);
8114 ASSERT_EQ (model0
, model1
);
8117 /* Verify that constraints for
8121 are equal after canonicalization. */
8124 test_canonicalization_3 ()
8126 tree int_3
= build_int_cst (integer_type_node
, 3);
8127 tree int_42
= build_int_cst (integer_type_node
, 42);
8128 tree x
= build_global_decl ("x", integer_type_node
);
8129 tree y
= build_global_decl ("y", integer_type_node
);
8131 region_model model0
;
8132 model0
.add_constraint (x
, GT_EXPR
, int_3
, NULL
);
8133 model0
.add_constraint (y
, GT_EXPR
, int_42
, NULL
);
8135 region_model model1
;
8136 model1
.add_constraint (y
, GT_EXPR
, int_42
, NULL
);
8137 model1
.add_constraint (x
, GT_EXPR
, int_3
, NULL
);
8139 model0
.canonicalize (NULL
);
8140 model1
.canonicalize (NULL
);
8141 ASSERT_EQ (model0
, model1
);
8144 /* Verify that we can canonicalize a model containing NaN and other real
8148 test_canonicalization_4 ()
8150 auto_vec
<tree
> csts
;
8151 append_interesting_constants (&csts
);
8157 FOR_EACH_VEC_ELT (csts
, i
, cst
)
8158 model
.get_rvalue (cst
, NULL
);
8160 model
.canonicalize (NULL
);
8163 /* Assert that if we have two region_model instances
8164 with values VAL_A and VAL_B for EXPR that they are
8165 mergable. Write the merged model to *OUT_MERGED_MODEL,
8166 and the merged svalue ptr to *OUT_MERGED_SVALUE.
8167 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
8168 for that region_model. */
8171 assert_region_models_merge (tree expr
, tree val_a
, tree val_b
,
8172 region_model
*out_merged_model
,
8173 svalue
**out_merged_svalue
)
8175 test_region_model_context ctxt
;
8176 region_model model0
;
8177 region_model model1
;
8179 model0
.set_value (model0
.get_lvalue (expr
, &ctxt
),
8180 model0
.get_rvalue (val_a
, &ctxt
),
8183 model1
.set_value (model1
.get_lvalue (expr
, &ctxt
),
8184 model1
.get_rvalue (val_b
, &ctxt
),
8187 /* They should be mergeable. */
8188 ASSERT_TRUE (model0
.can_merge_with_p (model1
, out_merged_model
));
8190 svalue_id merged_svalue_sid
= out_merged_model
->get_rvalue (expr
, &ctxt
);
8191 *out_merged_svalue
= out_merged_model
->get_svalue (merged_svalue_sid
);
8194 /* Verify that we can merge region_model instances. */
8197 test_state_merging ()
8199 tree int_42
= build_int_cst (integer_type_node
, 42);
8200 tree int_113
= build_int_cst (integer_type_node
, 113);
8201 tree x
= build_global_decl ("x", integer_type_node
);
8202 tree y
= build_global_decl ("y", integer_type_node
);
8203 tree z
= build_global_decl ("z", integer_type_node
);
8204 tree p
= build_global_decl ("p", ptr_type_node
);
8206 tree addr_of_y
= build1 (ADDR_EXPR
, ptr_type_node
, y
);
8207 tree addr_of_z
= build1 (ADDR_EXPR
, ptr_type_node
, z
);
8209 auto_vec
<tree
> param_types
;
8210 tree test_fndecl
= make_fndecl (integer_type_node
, "test_fn", param_types
);
8211 allocate_struct_function (test_fndecl
, true);
8214 tree a
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
8215 get_identifier ("a"),
8217 tree addr_of_a
= build1 (ADDR_EXPR
, ptr_type_node
, a
);
8219 /* Param "q", a pointer. */
8220 tree q
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
8221 get_identifier ("q"),
8225 region_model model0
;
8226 region_model model1
;
8227 region_model merged
;
8228 /* Verify empty models can be merged. */
8229 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8230 ASSERT_EQ (model0
, merged
);
8233 /* Verify that we can merge two contradictory constraints on the
8234 value for a global. */
8235 /* TODO: verify that the merged model doesn't have a value for
8238 region_model model0
;
8239 region_model model1
;
8240 region_model merged
;
8241 test_region_model_context ctxt
;
8242 model0
.add_constraint (x
, EQ_EXPR
, int_42
, &ctxt
);
8243 model1
.add_constraint (x
, EQ_EXPR
, int_113
, &ctxt
);
8244 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8245 ASSERT_NE (model0
, merged
);
8246 ASSERT_NE (model1
, merged
);
8249 /* Verify handling of a PARM_DECL. */
8251 test_region_model_context ctxt
;
8252 region_model model0
;
8253 region_model model1
;
8254 ASSERT_EQ (model0
.get_stack_depth (), 0);
8255 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, &ctxt
);
8256 ASSERT_EQ (model0
.get_stack_depth (), 1);
8257 ASSERT_EQ (model0
.get_function_at_depth (0),
8258 DECL_STRUCT_FUNCTION (test_fndecl
));
8259 model1
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, &ctxt
);
8262 = model0
.set_to_new_unknown_value (model0
.get_lvalue (a
, &ctxt
),
8263 integer_type_node
, &ctxt
);
8264 model1
.set_to_new_unknown_value (model1
.get_lvalue (a
, &ctxt
),
8265 integer_type_node
, &ctxt
);
8266 ASSERT_EQ (model0
, model1
);
8268 /* Check that get_value_by_name works for locals. */
8269 ASSERT_EQ (model0
.get_value_by_name ("a"), sid_a
);
8271 /* They should be mergeable, and the result should be the same. */
8272 region_model merged
;
8273 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8274 ASSERT_EQ (model0
, merged
);
8275 /* In particular, there should be an unknown value for "a". */
8276 svalue
*merged_a_sval
= merged
.get_svalue (merged
.get_rvalue (a
, &ctxt
));
8277 ASSERT_EQ (merged_a_sval
->get_kind (), SK_UNKNOWN
);
8280 /* Verify handling of a global. */
8282 test_region_model_context ctxt
;
8283 region_model model0
;
8284 region_model model1
;
8286 = model0
.set_to_new_unknown_value (model0
.get_lvalue (x
, &ctxt
),
8287 integer_type_node
, &ctxt
);
8288 model1
.set_to_new_unknown_value (model1
.get_lvalue (x
, &ctxt
),
8289 integer_type_node
, &ctxt
);
8290 ASSERT_EQ (model0
, model1
);
8292 /* Check that get_value_by_name works for globals. */
8293 ASSERT_EQ (model0
.get_value_by_name ("x"), sid_x
);
8295 /* They should be mergeable, and the result should be the same. */
8296 region_model merged
;
8297 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8298 ASSERT_EQ (model0
, merged
);
8299 /* In particular, there should be an unknown value for "x". */
8300 svalue
*merged_x_sval
= merged
.get_svalue (merged
.get_rvalue (x
, &ctxt
));
8301 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
8304 /* Use global-handling to verify various combinations of values. */
8306 /* Two equal constant values. */
8308 region_model merged
;
8309 svalue
*merged_x_sval
;
8310 assert_region_models_merge (x
, int_42
, int_42
, &merged
, &merged_x_sval
);
8312 /* In particular, there should be a constant value for "x". */
8313 ASSERT_EQ (merged_x_sval
->get_kind (), SK_CONSTANT
);
8314 ASSERT_EQ (merged_x_sval
->dyn_cast_constant_svalue ()->get_constant (),
8318 /* Two non-equal constant values. */
8320 region_model merged
;
8321 svalue
*merged_x_sval
;
8322 assert_region_models_merge (x
, int_42
, int_113
, &merged
, &merged_x_sval
);
8324 /* In particular, there should be an unknown value for "x". */
8325 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
8328 /* Uninit and constant. */
8330 region_model merged
;
8331 svalue
*merged_x_sval
;
8332 assert_region_models_merge (x
, NULL_TREE
, int_113
, &merged
, &merged_x_sval
);
8334 /* In particular, there should be an unknown value for "x". */
8335 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
8338 /* Constant and uninit. */
8340 region_model merged
;
8341 svalue
*merged_x_sval
;
8342 assert_region_models_merge (x
, int_42
, NULL_TREE
, &merged
, &merged_x_sval
);
8344 /* In particular, there should be an unknown value for "x". */
8345 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
8348 /* Unknown and constant. */
8351 /* Pointers: NULL and NULL. */
8354 /* Pointers: NULL and non-NULL. */
8357 /* Pointers: non-NULL and non-NULL: ptr to a local. */
8359 region_model model0
;
8360 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
8361 model0
.set_to_new_unknown_value (model0
.get_lvalue (a
, NULL
),
8362 integer_type_node
, NULL
);
8363 model0
.set_value (model0
.get_lvalue (p
, NULL
),
8364 model0
.get_rvalue (addr_of_a
, NULL
), NULL
);
8366 region_model
model1 (model0
);
8367 ASSERT_EQ (model0
, model1
);
8369 /* They should be mergeable, and the result should be the same. */
8370 region_model merged
;
8371 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8372 ASSERT_EQ (model0
, merged
);
8375 /* Pointers: non-NULL and non-NULL: ptr to a global. */
8377 region_model merged
;
8378 /* p == &y in both input models. */
8379 svalue
*merged_p_sval
;
8380 assert_region_models_merge (p
, addr_of_y
, addr_of_y
, &merged
,
8383 /* We should get p == &y in the merged model. */
8384 ASSERT_EQ (merged_p_sval
->get_kind (), SK_REGION
);
8385 region_svalue
*merged_p_ptr
= merged_p_sval
->dyn_cast_region_svalue ();
8386 region_id merged_p_star_rid
= merged_p_ptr
->get_pointee ();
8387 ASSERT_EQ (merged_p_star_rid
, merged
.get_lvalue (y
, NULL
));
8390 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
8392 region_model merged
;
8393 /* x == &y vs x == &z in the input models. */
8394 svalue
*merged_x_sval
;
8395 assert_region_models_merge (x
, addr_of_y
, addr_of_z
, &merged
,
8398 /* We should get x == unknown in the merged model. */
8399 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
8402 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
8404 test_region_model_context ctxt
;
8405 region_model model0
;
8406 region_id new_rid
= model0
.add_new_malloc_region ();
8408 = model0
.get_or_create_ptr_svalue (ptr_type_node
, new_rid
);
8409 model0
.set_value (model0
.get_lvalue (p
, &ctxt
),
8411 model0
.canonicalize (&ctxt
);
8413 region_model
model1 (model0
);
8415 ASSERT_EQ (model0
, model1
);
8417 region_model merged
;
8418 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8420 merged
.canonicalize (&ctxt
);
8422 /* The merged model ought to be identical (after canonicalization,
8424 ASSERT_EQ (model0
, merged
);
8427 /* Two regions sharing the same unknown svalue should continue sharing
8428 an unknown svalue after self-merger. */
8430 test_region_model_context ctxt
;
8431 region_model model0
;
8433 = model0
.set_to_new_unknown_value (model0
.get_lvalue (x
, &ctxt
),
8434 integer_type_node
, &ctxt
);
8435 model0
.set_value (model0
.get_lvalue (y
, &ctxt
), sid
, &ctxt
);
8436 region_model
model1 (model0
);
8438 /* They should be mergeable, and the result should be the same. */
8439 region_model merged
;
8440 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8441 ASSERT_EQ (model0
, merged
);
8443 /* In particular, we should have x == y. */
8444 ASSERT_EQ (merged
.eval_condition (x
, EQ_EXPR
, y
, &ctxt
),
8445 tristate (tristate::TS_TRUE
));
8450 region_model model0
;
8451 region_model model1
;
8452 test_region_model_context ctxt
;
8453 model0
.add_constraint (x
, EQ_EXPR
, int_42
, &ctxt
);
8454 model1
.add_constraint (x
, NE_EXPR
, int_42
, &ctxt
);
8455 ASSERT_TRUE (model0
.can_merge_with_p (model1
));
8459 region_model model0
;
8460 region_model model1
;
8461 test_region_model_context ctxt
;
8462 model0
.add_constraint (x
, EQ_EXPR
, int_42
, &ctxt
);
8463 model1
.add_constraint (x
, NE_EXPR
, int_42
, &ctxt
);
8464 model1
.add_constraint (x
, EQ_EXPR
, int_113
, &ctxt
);
8465 ASSERT_TRUE (model0
.can_merge_with_p (model1
));
8469 // TODO: what can't we merge? need at least one such test
8471 /* TODO: various things
8474 - every combination, but in particular
8480 test_region_model_context ctxt
;
8481 region_model model0
;
8483 region_id x_rid
= model0
.get_lvalue (x
, &ctxt
);
8484 region_id x_as_ptr
= model0
.get_or_create_view (x_rid
, ptr_type_node
,
8486 model0
.set_value (x_as_ptr
, model0
.get_rvalue (addr_of_y
, &ctxt
), &ctxt
);
8488 region_model
model1 (model0
);
8489 ASSERT_EQ (model1
, model0
);
8491 /* They should be mergeable, and the result should be the same. */
8492 region_model merged
;
8493 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8496 /* Verify that we can merge a model in which a local in an older stack
8497 frame points to a local in a more recent stack frame. */
8499 region_model model0
;
8500 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
8501 region_id q_in_first_frame
= model0
.get_lvalue (q
, NULL
);
8503 /* Push a second frame. */
8504 region_id rid_2nd_frame
8505 = model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
8507 /* Have a pointer in the older frame point to a local in the
8508 more recent frame. */
8509 svalue_id sid_ptr
= model0
.get_rvalue (addr_of_a
, NULL
);
8510 model0
.set_value (q_in_first_frame
, sid_ptr
, NULL
);
8512 /* Verify that it's pointing at the newer frame. */
8513 region_id rid_pointee
8514 = model0
.get_svalue (sid_ptr
)->dyn_cast_region_svalue ()->get_pointee ();
8515 ASSERT_EQ (model0
.get_region (rid_pointee
)->get_parent (), rid_2nd_frame
);
8517 model0
.canonicalize (NULL
);
8519 region_model
model1 (model0
);
8520 ASSERT_EQ (model0
, model1
);
8522 /* They should be mergeable, and the result should be the same
8523 (after canonicalization, at least). */
8524 region_model merged
;
8525 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8526 merged
.canonicalize (NULL
);
8527 ASSERT_EQ (model0
, merged
);
8530 /* Verify that we can merge a model in which a local points to a global. */
8532 region_model model0
;
8533 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
8534 model0
.set_value (model0
.get_lvalue (q
, NULL
),
8535 model0
.get_rvalue (addr_of_y
, NULL
), NULL
);
8537 model0
.canonicalize (NULL
);
8539 region_model
model1 (model0
);
8540 ASSERT_EQ (model0
, model1
);
8542 /* They should be mergeable, and the result should be the same
8543 (after canonicalization, at least). */
8544 region_model merged
;
8545 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8546 merged
.canonicalize (NULL
);
8547 ASSERT_EQ (model0
, merged
);
8551 /* Verify that constraints are correctly merged when merging region_model
8555 test_constraint_merging ()
8557 tree int_0
= build_int_cst (integer_type_node
, 0);
8558 tree int_5
= build_int_cst (integer_type_node
, 5);
8559 tree x
= build_global_decl ("x", integer_type_node
);
8560 tree y
= build_global_decl ("y", integer_type_node
);
8561 tree z
= build_global_decl ("z", integer_type_node
);
8562 tree n
= build_global_decl ("n", integer_type_node
);
8564 test_region_model_context ctxt
;
8566 /* model0: 0 <= (x == y) < n. */
8567 region_model model0
;
8568 model0
.set_to_new_unknown_value (model0
.get_lvalue (x
, &ctxt
),
8569 integer_type_node
, &ctxt
);
8570 model0
.add_constraint (x
, EQ_EXPR
, y
, &ctxt
);
8571 model0
.add_constraint (x
, GE_EXPR
, int_0
, NULL
);
8572 model0
.add_constraint (x
, LT_EXPR
, n
, NULL
);
8574 /* model1: z != 5 && (0 <= x < n). */
8575 region_model model1
;
8576 model1
.set_to_new_unknown_value (model1
.get_lvalue (x
, &ctxt
),
8577 integer_type_node
, &ctxt
);
8578 model1
.add_constraint (z
, NE_EXPR
, int_5
, NULL
);
8579 model1
.add_constraint (x
, GE_EXPR
, int_0
, NULL
);
8580 model1
.add_constraint (x
, LT_EXPR
, n
, NULL
);
8582 /* They should be mergeable; the merged constraints should
8583 be: (0 <= x < n). */
8584 region_model merged
;
8585 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8587 ASSERT_EQ (merged
.eval_condition (x
, GE_EXPR
, int_0
, &ctxt
),
8588 tristate (tristate::TS_TRUE
));
8589 ASSERT_EQ (merged
.eval_condition (x
, LT_EXPR
, n
, &ctxt
),
8590 tristate (tristate::TS_TRUE
));
8592 ASSERT_EQ (merged
.eval_condition (z
, NE_EXPR
, int_5
, &ctxt
),
8593 tristate (tristate::TS_UNKNOWN
));
8594 ASSERT_EQ (merged
.eval_condition (x
, LT_EXPR
, y
, &ctxt
),
8595 tristate (tristate::TS_UNKNOWN
));
8598 /* Run all of the selftests within this file. */
8601 analyzer_region_model_cc_tests ()
/* NOTE(review): gaps in the embedded original line numbers (8604-8606,
   8613) indicate that additional test calls were lost from this copy
   during extraction — confirm the full list against upstream
   gcc/analyzer/region-model.cc before relying on it.  */
8603 test_tree_cmp_on_constants ();
8607 test_get_representative_tree ();
8608 test_unique_constants ();
8609 test_svalue_equality ();
8610 test_region_equality ();
8611 test_purging_by_criteria ();
8612 test_purge_unused_svalues ();
8614 test_stack_frames ();
8615 test_get_representative_path_var ();
8616 test_canonicalization_1 ();
8617 test_canonicalization_2 ();
8618 test_canonicalization_3 ();
8619 test_canonicalization_4 ();
8620 test_state_merging ();
8621 test_constraint_merging ();
8624 } // namespace selftest
8626 #endif /* CHECKING_P */
8630 #endif /* #if ENABLE_ANALYZER */