analyzer: make summarized dumps more comprehensive
[gcc.git] / gcc / analyzer / region-model.cc
1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "gimple-iterator.h"
29 #include "diagnostic-core.h"
30 #include "graphviz.h"
31 #include "options.h"
32 #include "cgraph.h"
33 #include "tree-dfa.h"
34 #include "stringpool.h"
35 #include "convert.h"
36 #include "target.h"
37 #include "fold-const.h"
38 #include "tree-pretty-print.h"
39 #include "diagnostic-color.h"
40 #include "diagnostic-metadata.h"
41 #include "tristate.h"
42 #include "bitmap.h"
43 #include "selftest.h"
44 #include "function.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "digraph.h"
52 #include "analyzer/supergraph.h"
53 #include "sbitmap.h"
54 #include "analyzer/region-model.h"
55 #include "analyzer/constraint-manager.h"
56 #include "diagnostic-event-id.h"
57 #include "analyzer/sm.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "analyzer/pending-diagnostic.h"
61 #include "analyzer/analyzer-selftests.h"
62 #include "stor-layout.h"
63
64 #if ENABLE_ANALYZER
65
66 namespace ana {
67
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.
   Uses TDF_SLIM so that e.g. types are printed tersely.  */

static void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
76
/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.
   The quotes are colorized if PP has colorization enabled.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}
87
/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround: it emits the quotes and the type
   directly, without going through pp_printf's format-string machinery.  */

static void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
106
107 /* Dump this path_var to PP (which must support %E for trees).
108
109 Express the stack depth using an "@DEPTH" suffix, so e.g. given
110 void foo (int j);
111 void bar (int i)
112 {
113 foo (i);
114 }
115 then:
116 - the "i" in "bar" would be "(i @ 0)"
117 - the "j" in "foo" would be "(j @ 1)". */
118
119 void
120 path_var::dump (pretty_printer *pp) const
121 {
122 if (m_tree == NULL_TREE)
123 pp_string (pp, "NULL");
124 if (CONSTANT_CLASS_P (m_tree))
125 pp_printf (pp, "%qE", m_tree);
126 else
127 pp_printf (pp, "(%qE @ %i)", m_tree, m_stack_depth);
128 }
129
130 /* For use in printing a comma-separated list. */
131
132 static void
133 dump_separator (pretty_printer *pp, bool *is_first)
134 {
135 if (!*is_first)
136 pp_string (pp, ", ");
137 *is_first = false;
138 }
139
140 /* Concrete subclass of constraint_manager that wires it up to a region_model
141 (whilst allowing the constraint_manager and region_model to be somewhat
142 at arms length).
143 TODO: revisit this; maybe put the region_model * into the constraint_manager
144 base class. */
145
146 class impl_constraint_manager : public constraint_manager
147 {
148 public:
149 impl_constraint_manager (region_model *model)
150 : constraint_manager (),
151 m_model (model)
152 {}
153
154 impl_constraint_manager (const impl_constraint_manager &other,
155 region_model *model)
156 : constraint_manager (other),
157 m_model (model)
158 {}
159
160 constraint_manager *clone (region_model *model) const
161 {
162 return new impl_constraint_manager (*this, model);
163 }
164
165 tree maybe_get_constant (svalue_id sid) const FINAL OVERRIDE
166 {
167 svalue *svalue = m_model->get_svalue (sid);
168 return svalue->maybe_get_constant ();
169 }
170
171 svalue_id get_sid_for_constant (tree cst) const FINAL OVERRIDE
172 {
173 gcc_assert (CONSTANT_CLASS_P (cst));
174 return m_model->get_rvalue (cst, NULL);
175 }
176
177 int get_num_svalues () const FINAL OVERRIDE
178 {
179 return m_model->get_num_svalues ();
180 }
181
182 private:
183 region_model *m_model;
184 };
185
186 /* class svalue_id. */
187
188 /* Print this svalue_id to PP. */
189
190 void
191 svalue_id::print (pretty_printer *pp) const
192 {
193 if (null_p ())
194 pp_printf (pp, "null");
195 else
196 pp_printf (pp, "sv%i", m_idx);
197 }
198
/* Print this svalue_id in .dot format to PP, for use as a GraphViz
   node name ("svalue_N").  Must not be called on the null id.  */

void
svalue_id::dump_node_name_to_pp (pretty_printer *pp) const
{
  gcc_assert (!null_p ());
  pp_printf (pp, "svalue_%i", m_idx);
}
207
208 /* Assert that this object is valid (w.r.t. MODEL). */
209
210 void
211 svalue_id::validate (const region_model &model) const
212 {
213 gcc_assert (null_p () || m_idx < (int)model.get_num_svalues ());
214 }
215
216 /* class region_id. */
217
218 /* Print this region_id to PP. */
219
220 void
221 region_id::print (pretty_printer *pp) const
222 {
223 if (null_p ())
224 pp_printf (pp, "null");
225 else
226 pp_printf (pp, "r%i", m_idx);
227 }
228
/* Print this region_id in .dot format to PP, for use as a GraphViz
   node name ("region_N").  Must not be called on the null id.  */

void
region_id::dump_node_name_to_pp (pretty_printer *pp) const
{
  gcc_assert (!null_p ());
  pp_printf (pp, "region_%i", m_idx);
}
237
238 /* Assert that this object is valid (w.r.t. MODEL). */
239
240 void
241 region_id::validate (const region_model &model) const
242 {
243 gcc_assert (null_p () || m_idx < (int)model.get_num_regions ());
244 }
245
246 /* class id_set. */
247
/* id_set<region_id>'s ctor: create an empty set, sized for MODEL's
   current number of regions.  */

template<>
id_set<region_id>::id_set (const region_model *model)
: m_bitmap (model->get_num_regions ())
{
  bitmap_clear (m_bitmap);
}
256
257 /* class svalue and its various subclasses. */
258
259 /* class svalue. */
260
261 /* svalue's equality operator. Most of the work is done by the
262 a "compare_fields" implementation on each subclass. */
263
264 bool
265 svalue::operator== (const svalue &other) const
266 {
267 enum svalue_kind this_kind = get_kind ();
268 enum svalue_kind other_kind = other.get_kind ();
269 if (this_kind != other_kind)
270 return false;
271
272 if (m_type != other.m_type)
273 return false;
274
275 switch (this_kind)
276 {
277 default:
278 gcc_unreachable ();
279 case SK_REGION:
280 {
281 const region_svalue &this_sub
282 = (const region_svalue &)*this;
283 const region_svalue &other_sub
284 = (const region_svalue &)other;
285 return this_sub.compare_fields (other_sub);
286 }
287 break;
288 case SK_CONSTANT:
289 {
290 const constant_svalue &this_sub
291 = (const constant_svalue &)*this;
292 const constant_svalue &other_sub
293 = (const constant_svalue &)other;
294 return this_sub.compare_fields (other_sub);
295 }
296 break;
297 case SK_UNKNOWN:
298 {
299 const unknown_svalue &this_sub
300 = (const unknown_svalue &)*this;
301 const unknown_svalue &other_sub
302 = (const unknown_svalue &)other;
303 return this_sub.compare_fields (other_sub);
304 }
305 break;
306 case SK_POISONED:
307 {
308 const poisoned_svalue &this_sub
309 = (const poisoned_svalue &)*this;
310 const poisoned_svalue &other_sub
311 = (const poisoned_svalue &)other;
312 return this_sub.compare_fields (other_sub);
313 }
314 break;
315 case SK_SETJMP:
316 {
317 const setjmp_svalue &this_sub
318 = (const setjmp_svalue &)*this;
319 const setjmp_svalue &other_sub
320 = (const setjmp_svalue &)other;
321 return this_sub.compare_fields (other_sub);
322 }
323 break;
324 }
325 }
326
/* Generate a hash value for this svalue.  The type (if any) is mixed
   in here; the rest of the work is done by the add_to_hash vfunc on
   the subclass.  The order (type first, then subclass fields) must
   stay fixed so that equal svalues hash equally.  */

hashval_t
svalue::hash () const
{
  inchash::hash hstate;
  if (m_type)
    hstate.add_int (TYPE_UID (m_type));
  add_to_hash (hstate);
  return hstate.end ();
}
339
/* Print this svalue and its ID to PP, in the form
   "svN: {type: 'T', <subclass details>}".
   THIS_SID must be the id of this svalue within MODEL.  */

void
svalue::print (const region_model &model,
	       svalue_id this_sid,
	       pretty_printer *pp) const
{
  this_sid.print (pp);
  pp_string (pp, ": {");

  if (m_type)
    {
      gcc_assert (TYPE_P (m_type));
      pp_string (pp, "type: ");
      print_quoted_type (pp, m_type);
      pp_string (pp, ", ");
    }

  /* vfunc: subclass-specific details.  */
  print_details (model, this_sid, pp);

  pp_string (pp, "}");
}
363
/* Dump this svalue in the form of a .dot record to PP, emitting a node
   statement named via THIS_SID, with the printed form of the svalue as
   the node's label.  */

void
svalue::dump_dot_to_pp (const region_model &model,
			svalue_id this_sid,
			pretty_printer *pp) const
{
  this_sid.dump_node_name_to_pp (pp);
  pp_printf (pp, " [label=\"");
  pp_write_text_to_stream (pp);
  /* NOTE(review): print () below also emits "svN: {...}", so the sid and
     opening brace appear to be emitted twice in the label — confirm
     whether this duplication is intended.  */
  this_sid.print (pp);
  pp_string (pp, ": {");
  print (model, this_sid, pp);
  pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
  pp_string (pp, "}\"];");
  pp_newline (pp);
}
381
/* Base implementation of svalue::remap_region_ids vfunc: a no-op,
   since most svalues don't refer to regions.  Subclasses that do
   (e.g. region_svalue) override this.  */

void
svalue::remap_region_ids (const region_id_map &)
{
  /* Empty.  */
}
389
/* Base implementation of svalue::walk_for_canonicalization vfunc:
   a no-op for svalues that don't reference regions.  */

void
svalue::walk_for_canonicalization (canonicalization *) const
{
  /* Empty.  */
}
397
398 /* Base implementation of svalue::get_child_sid vfunc. */
399
400 svalue_id
401 svalue::get_child_sid (region *parent ATTRIBUTE_UNUSED,
402 region *child,
403 region_model &model,
404 region_model_context *ctxt ATTRIBUTE_UNUSED)
405 {
406 svalue *new_child_value = clone ();
407 if (child->get_type ())
408 new_child_value->m_type = child->get_type ();
409 svalue_id new_child_sid = model.add_svalue (new_child_value);
410 return new_child_sid;
411 }
412
413 /* If this svalue is a constant_svalue, return the underlying tree constant.
414 Otherwise return NULL_TREE. */
415
416 tree
417 svalue::maybe_get_constant () const
418 {
419 if (const constant_svalue *cst_sval = dyn_cast_constant_svalue ())
420 return cst_sval->get_constant ();
421 else
422 return NULL_TREE;
423 }
424
425 /* class region_svalue : public svalue. */
426
/* Compare the fields of this region_svalue with OTHER, returning true
   if they are equal: two pointer values are equal iff they point at
   the same region.
   For use by svalue::operator==.  */

bool
region_svalue::compare_fields (const region_svalue &other) const
{
  return m_rid == other.m_rid;
}
436
/* Implementation of svalue::add_to_hash vfunc for region_svalue:
   hash the pointed-to region id.  */

void
region_svalue::add_to_hash (inchash::hash &hstate) const
{
  inchash::add (m_rid, hstate);
}
444
445 /* Implementation of svalue::print_details vfunc for region_svalue. */
446
447 void
448 region_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
449 svalue_id this_sid ATTRIBUTE_UNUSED,
450 pretty_printer *pp) const
451 {
452 if (m_rid.null_p ())
453 pp_string (pp, "NULL");
454 else
455 {
456 pp_string (pp, "&");
457 m_rid.print (pp);
458 }
459 }
460
461 /* Implementation of svalue::dump_dot_to_pp for region_svalue. */
462
463 void
464 region_svalue::dump_dot_to_pp (const region_model &model,
465 svalue_id this_sid,
466 pretty_printer *pp) const
467 {
468 svalue::dump_dot_to_pp (model, this_sid, pp);
469
470 /* If non-NULL, add an edge to the pointed-to region. */
471 if (!m_rid.null_p ())
472 {
473 this_sid.dump_node_name_to_pp (pp);
474 pp_string (pp, " -> ");
475 m_rid.dump_node_name_to_pp (pp);
476 pp_string (pp, ";");
477 pp_newline (pp);
478 }
479 }
480
/* Implementation of svalue::remap_region_ids vfunc for region_svalue:
   update the pointee id via MAP.  */

void
region_svalue::remap_region_ids (const region_id_map &map)
{
  map.update (&m_rid);
}
488
/* Merge REGION_SVAL_A and REGION_SVAL_B using MERGER, writing the result
   into *MERGED_SID.  TYPE is the type for any newly-created svalue.

   The merged value is:
   - a pointer to the already-merged region, if both pointees were
     previously merged to the same region;
   - a pointer to a merged region created here, if the pointees are the
     "same" lvalue, or are both heap-allocated;
   - an unknown_svalue otherwise.  */

void
region_svalue::merge_values (const region_svalue &region_sval_a,
			     const region_svalue &region_sval_b,
			     svalue_id *merged_sid,
			     tree type,
			     model_merger *merger)
{
  region_id a_rid = region_sval_a.get_pointee ();
  region_id b_rid = region_sval_b.get_pointee ();

  /* Both are non-NULL.  */
  gcc_assert (!a_rid.null_p () && !b_rid.null_p ());

  /* Have these ptr-values already been merged?  */

  region_id a_rid_in_m
    = merger->m_map_regions_from_a_to_m.get_dst_for_src (a_rid);
  region_id b_rid_in_m
    = merger->m_map_regions_from_b_to_m.get_dst_for_src (b_rid);

  /* "null_p" here means "we haven't seen this ptr-value before".
     If we've seen one but not the other, or we have different
     regions, then the merged ptr has to be "unknown".  */
  if (a_rid_in_m != b_rid_in_m)
    {
      svalue *merged_sval = new unknown_svalue (type);
      *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
      return;
    }

  /* Have we seen this yet?  If so, reuse the value.  */
  if (!a_rid_in_m.null_p ())
    {
      *merged_sid
	= merger->m_merged_model->get_or_create_ptr_svalue (type, a_rid_in_m);
      return;
    }

  /* Otherwise we have A/B regions that haven't been referenced yet.  */

  /* Are the regions the "same", when seen from the tree point-of-view.
     If so, create a merged pointer to it.  */
  path_var pv_a = merger->m_model_a->get_representative_path_var (a_rid);
  path_var pv_b = merger->m_model_b->get_representative_path_var (b_rid);
  if (pv_a.m_tree
      && pv_a == pv_b)
    {
      region_id merged_pointee_rid
	= merger->m_merged_model->get_lvalue (pv_a, NULL);
      *merged_sid
	= merger->m_merged_model->get_or_create_ptr_svalue (type,
							    merged_pointee_rid);
      merger->record_regions (a_rid, b_rid, merged_pointee_rid);
      return;
    }

  /* Handle an A/B pair of ptrs that both point at heap regions.
     If they both have a heap region in the merger model, merge them.  */
  region *region_a = merger->m_model_a->get_region (a_rid);
  region *region_b = merger->m_model_b->get_region (b_rid);
  region_id a_parent_rid = region_a->get_parent ();
  region_id b_parent_rid = region_b->get_parent ();
  region *parent_region_a = merger->m_model_a->get_region (a_parent_rid);
  region *parent_region_b = merger->m_model_b->get_region (b_parent_rid);
  if (parent_region_a
      && parent_region_b
      && parent_region_a->get_kind () == RK_HEAP
      && parent_region_b->get_kind () == RK_HEAP)
    {
      /* We have an A/B pair of ptrs that both point at heap regions.  */
      /* presumably we want to see if each A/B heap region already
	 has a merged region, and, if so, is it the same one.  */
      // This check is above

      region_id merged_pointee_rid
	= merger->m_merged_model->add_new_malloc_region ();
      *merged_sid
	= merger->m_merged_model->get_or_create_ptr_svalue
	    (type, merged_pointee_rid);
      merger->record_regions (a_rid, b_rid, merged_pointee_rid);
      return;
    }

  /* Two different non-NULL pointers?  Merge to unknown.  */
  svalue *merged_sval = new unknown_svalue (type);
  *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
  return;
}
580
/* Implementation of svalue::walk_for_canonicalization vfunc for
   region_svalue: visit the pointed-to region.  */

void
region_svalue::walk_for_canonicalization (canonicalization *c) const
{
  c->walk_rid (m_rid);
}
589
590 /* Evaluate the condition LHS OP RHS.
591 Subroutine of region_model::eval_condition for when we have a pair of
592 pointers. */
593
594 tristate
595 region_svalue::eval_condition (region_svalue *lhs,
596 enum tree_code op,
597 region_svalue *rhs)
598 {
599 /* See if they point to the same region. */
600 /* TODO: what about child regions where the child is the first child
601 (or descendent)? */
602 region_id lhs_rid = lhs->get_pointee ();
603 region_id rhs_rid = rhs->get_pointee ();
604 switch (op)
605 {
606 default:
607 gcc_unreachable ();
608
609 case EQ_EXPR:
610 if (lhs_rid == rhs_rid)
611 return tristate::TS_TRUE;
612 else
613 return tristate::TS_FALSE;
614 break;
615
616 case NE_EXPR:
617 if (lhs_rid != rhs_rid)
618 return tristate::TS_TRUE;
619 else
620 return tristate::TS_FALSE;
621 break;
622
623 case GE_EXPR:
624 case LE_EXPR:
625 if (lhs_rid == rhs_rid)
626 return tristate::TS_TRUE;
627 break;
628
629 case GT_EXPR:
630 case LT_EXPR:
631 if (lhs_rid == rhs_rid)
632 return tristate::TS_FALSE;
633 break;
634 }
635
636 return tristate::TS_UNKNOWN;
637 }
638
639 /* class constant_svalue : public svalue. */
640
/* Compare the fields of this constant_svalue with OTHER, returning true
   if they are equal.  Pointer equality of the tree constants suffices,
   since tree constants are shared.
   For use by svalue::operator==.  */

bool
constant_svalue::compare_fields (const constant_svalue &other) const
{
  return m_cst_expr == other.m_cst_expr;
}
650
/* Implementation of svalue::add_to_hash vfunc for constant_svalue:
   hash the constant expression.  */

void
constant_svalue::add_to_hash (inchash::hash &hstate) const
{
  inchash::add_expr (m_cst_expr, hstate);
}
658
659 /* Merge the CST_SVAL_A and CST_SVAL_B using MERGER, writing the id of
660 the resulting svalue into *MERGED_SID. */
661
662 void
663 constant_svalue::merge_values (const constant_svalue &cst_sval_a,
664 const constant_svalue &cst_sval_b,
665 svalue_id *merged_sid,
666 model_merger *merger)
667 {
668 tree cst_a = cst_sval_a.get_constant ();
669 tree cst_b = cst_sval_b.get_constant ();
670 svalue *merged_sval;
671 if (cst_a == cst_b)
672 {
673 /* If they are the same constant, merge as that constant value. */
674 merged_sval = new constant_svalue (cst_a);
675 }
676 else
677 {
678 /* Otherwise, we have two different constant values.
679 Merge as an unknown value.
680 TODO: impose constraints on the value?
681 (maybe just based on A, to avoid infinite chains) */
682 merged_sval = new unknown_svalue (TREE_TYPE (cst_a));
683 }
684 *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
685 }
686
687 /* Evaluate the condition LHS OP RHS.
688 Subroutine of region_model::eval_condition for when we have a pair of
689 constants. */
690
691 tristate
692 constant_svalue::eval_condition (constant_svalue *lhs,
693 enum tree_code op,
694 constant_svalue *rhs)
695 {
696 tree lhs_const = lhs->get_constant ();
697 tree rhs_const = rhs->get_constant ();
698
699 gcc_assert (CONSTANT_CLASS_P (lhs_const));
700 gcc_assert (CONSTANT_CLASS_P (rhs_const));
701
702 /* Check for comparable types. */
703 if (types_compatible_p (TREE_TYPE (lhs_const), TREE_TYPE (rhs_const)))
704 {
705 tree comparison
706 = fold_binary (op, boolean_type_node, lhs_const, rhs_const);
707 if (comparison == boolean_true_node)
708 return tristate (tristate::TS_TRUE);
709 if (comparison == boolean_false_node)
710 return tristate (tristate::TS_FALSE);
711 }
712 return tristate::TS_UNKNOWN;
713 }
714
/* Implementation of svalue::print_details vfunc for constant_svalue:
   print the constant expression.  */

void
constant_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
				svalue_id this_sid ATTRIBUTE_UNUSED,
				pretty_printer *pp) const
{
  pp_printf (pp, "%qE", m_cst_expr);
}
724
725 /* Implementation of svalue::get_child_sid vfunc for constant_svalue. */
726
727 svalue_id
728 constant_svalue::get_child_sid (region *parent ATTRIBUTE_UNUSED,
729 region *child,
730 region_model &model,
731 region_model_context *ctxt ATTRIBUTE_UNUSED)
732 {
733 /* TODO: handle the all-zeroes case by returning an all-zeroes of the
734 child type. */
735
736 /* Otherwise, we don't have a good way to get a child value out of a
737 constant.
738
739 Handle this case by using an unknown value. */
740 svalue *unknown_sval = new unknown_svalue (child->get_type ());
741 return model.add_svalue (unknown_sval);
742 }
743
744 /* class unknown_svalue : public svalue. */
745
/* Compare the fields of this unknown_svalue with OTHER, returning true
   if they are equal.
   For use by svalue::operator==.

   Always true: when comparing two region models, we want two peer
   unknown_svalue instances to be considered the "same", so that the
   models can compare equal.  */

bool
unknown_svalue::compare_fields (const unknown_svalue &) const
{
  return true;
}
758
/* Implementation of svalue::add_to_hash vfunc for unknown_svalue:
   a no-op, consistent with compare_fields treating all unknowns as
   equal.  */

void
unknown_svalue::add_to_hash (inchash::hash &) const
{
  /* Empty.  */
}
766
/* Implementation of svalue::print_details vfunc for unknown_svalue.  */

void
unknown_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
			       svalue_id this_sid ATTRIBUTE_UNUSED,
			       pretty_printer *pp) const
{
  pp_string (pp, "unknown");
}
776
777 /* Get a string for KIND for use in debug dumps. */
778
779 const char *
780 poison_kind_to_str (enum poison_kind kind)
781 {
782 switch (kind)
783 {
784 default:
785 gcc_unreachable ();
786 case POISON_KIND_UNINIT:
787 return "uninit";
788 case POISON_KIND_FREED:
789 return "freed";
790 case POISON_KIND_POPPED_STACK:
791 return "popped stack";
792 }
793 }
794
795 /* class poisoned_svalue : public svalue. */
796
/* Compare the fields of this poisoned_svalue with OTHER, returning true
   if they are equal: two poisoned values are equal iff they have the
   same kind of poison.
   For use by svalue::operator==.  */

bool
poisoned_svalue::compare_fields (const poisoned_svalue &other) const
{
  return m_kind == other.m_kind;
}
806
/* Implementation of svalue::add_to_hash vfunc for poisoned_svalue:
   hash the poison kind.  */

void
poisoned_svalue::add_to_hash (inchash::hash &hstate) const
{
  hstate.add_int (m_kind);
}
814
/* Implementation of svalue::print_details vfunc for poisoned_svalue:
   print "poisoned: KIND".  */

void
poisoned_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
				svalue_id this_sid ATTRIBUTE_UNUSED,
				pretty_printer *pp) const
{
  pp_printf (pp, "poisoned: %s", poison_kind_to_str (m_kind));
}
824
825 /* class setjmp_svalue's implementation is in engine.cc, so that it can use
826 the declaration of exploded_node. */
827
828 /* class region and its various subclasses. */
829
830 /* Get a string for KIND for use in debug dumps. */
831
832 const char *
833 region_kind_to_str (enum region_kind kind)
834 {
835 switch (kind)
836 {
837 default:
838 gcc_unreachable ();
839 case RK_PRIMITIVE:
840 return "primitive";
841 case RK_STRUCT:
842 return "struct";
843 case RK_UNION:
844 return "union";
845 case RK_ARRAY:
846 return "array";
847 case RK_FRAME:
848 return "frame";
849 case RK_GLOBALS:
850 return "globals";
851 case RK_CODE:
852 return "code";
853 case RK_FUNCTION:
854 return "function";
855 case RK_STACK:
856 return "stack";
857 case RK_HEAP:
858 return "heap";
859 case RK_ROOT:
860 return "root";
861 case RK_SYMBOLIC:
862 return "symbolic";
863 }
864 }
865
866 /* class region. */
867
868 /* Equality operator for region.
869 After comparing base class fields and kind, the rest of the
870 comparison is handled off to a "compare_fields" member function
871 specific to the appropriate subclass. */
872
873 bool
874 region::operator== (const region &other) const
875 {
876 if (m_parent_rid != other.m_parent_rid)
877 return false;
878 if (m_sval_id != other.m_sval_id)
879 return false;
880 if (m_type != other.m_type)
881 return false;
882
883 enum region_kind this_kind = get_kind ();
884 enum region_kind other_kind = other.get_kind ();
885 if (this_kind != other_kind)
886 return false;
887
888 /* Compare views. */
889 if (m_view_rids.length () != other.m_view_rids.length ())
890 return false;
891 int i;
892 region_id *rid;
893 FOR_EACH_VEC_ELT (m_view_rids, i, rid)
894 if (! (*rid == other.m_view_rids[i]))
895 return false;
896
897 switch (this_kind)
898 {
899 default:
900 gcc_unreachable ();
901 case RK_PRIMITIVE:
902 {
903 #if 1
904 return true;
905 #else
906 const primitive_region &this_sub
907 = (const primitive_region &)*this;
908 const primitive_region &other_sub
909 = (const primitive_region &)other;
910 return this_sub.compare_fields (other_sub);
911 #endif
912 }
913 case RK_STRUCT:
914 {
915 const struct_region &this_sub
916 = (const struct_region &)*this;
917 const struct_region &other_sub
918 = (const struct_region &)other;
919 return this_sub.compare_fields (other_sub);
920 }
921 case RK_UNION:
922 {
923 const union_region &this_sub
924 = (const union_region &)*this;
925 const union_region &other_sub
926 = (const union_region &)other;
927 return this_sub.compare_fields (other_sub);
928 }
929 case RK_ARRAY:
930 {
931 const array_region &this_sub
932 = (const array_region &)*this;
933 const array_region &other_sub
934 = (const array_region &)other;
935 return this_sub.compare_fields (other_sub);
936 }
937 case RK_FRAME:
938 {
939 const frame_region &this_sub
940 = (const frame_region &)*this;
941 const frame_region &other_sub
942 = (const frame_region &)other;
943 return this_sub.compare_fields (other_sub);
944 }
945 case RK_GLOBALS:
946 {
947 const globals_region &this_sub
948 = (const globals_region &)*this;
949 const globals_region &other_sub
950 = (const globals_region &)other;
951 return this_sub.compare_fields (other_sub);
952 }
953 case RK_CODE:
954 {
955 const code_region &this_sub
956 = (const code_region &)*this;
957 const code_region &other_sub
958 = (const code_region &)other;
959 return this_sub.compare_fields (other_sub);
960 }
961 case RK_FUNCTION:
962 {
963 const function_region &this_sub
964 = (const function_region &)*this;
965 const function_region &other_sub
966 = (const function_region &)other;
967 return this_sub.compare_fields (other_sub);
968 }
969 case RK_STACK:
970 {
971 const stack_region &this_sub
972 = (const stack_region &)*this;
973 const stack_region &other_sub
974 = (const stack_region &)other;
975 return this_sub.compare_fields (other_sub);
976 }
977 case RK_ROOT:
978 {
979 const root_region &this_sub
980 = (const root_region &)*this;
981 const root_region &other_sub
982 = (const root_region &)other;
983 return this_sub.compare_fields (other_sub);
984 }
985 case RK_SYMBOLIC:
986 {
987 const symbolic_region &this_sub
988 = (const symbolic_region &)*this;
989 const symbolic_region &other_sub
990 = (const symbolic_region &)other;
991 return this_sub.compare_fields (other_sub);
992 }
993 case RK_HEAP:
994 {
995 const heap_region &this_sub
996 = (const heap_region &)*this;
997 const heap_region &other_sub
998 = (const heap_region &)other;
999 return this_sub.compare_fields (other_sub);
1000 }
1001 }
1002 }
1003
/* Get the parent region of this region, or NULL for the root region
   (whose parent id is null).  */

region *
region::get_parent_region (const region_model &model) const
{
  return model.get_region (m_parent_rid);
}
1011
/* Set this region's value to RHS_SID (or potentially a variant of it,
   for some kinds of casts).
   THIS_RID must be this region's id within MODEL.
   Also maintains the "active view" bookkeeping on the parent region.  */

void
region::set_value (region_model &model, region_id this_rid, svalue_id rhs_sid,
		   region_model_context *ctxt)
{
  /* Handle some kinds of casting: if both this region and the rhs value
     are typed, cast the value to this region's type.  */
  if (m_type)
    {
      svalue *sval = model.get_svalue (rhs_sid);
      if (sval->get_type ())
	rhs_sid = model.maybe_cast (m_type, rhs_sid, ctxt);

      /* Re-fetch: maybe_cast may have created a new svalue.  */
      sval = model.get_svalue (rhs_sid);
      if (sval->get_type ())
	gcc_assert (m_type == sval->get_type ());
    }

  m_sval_id = rhs_sid;

  /* Update views.
     If this is a view, it becomes its parent's active view.
     If there was already an active views, invalidate its value; otherwise
     if the parent itself had a value, invalidate it.
     If it's not a view, then deactivate any view that is active on this
     region.  */
  {
    if (m_is_view)
      become_active_view (model, this_rid);
    else
      {
	deactivate_any_active_view (model);
	gcc_assert (m_active_view_rid.null_p ());
      }
  }
}
1049
/* Make this region (with id THIS_RID) the "active" view of its parent.
   Any other active view has its value set to "unknown" and descendent
   values cleared.
   If there wasn't an active view, then set the parent's value to unknown,
   and clear its descendent values (apart from this view).
   Only valid on view regions (m_is_view).  */

void
region::become_active_view (region_model &model, region_id this_rid)
{
  gcc_assert (m_is_view);

  region *parent_reg = model.get_region (m_parent_rid);
  gcc_assert (parent_reg);

  region_id old_active_view_rid = parent_reg->m_active_view_rid;

  if (old_active_view_rid == this_rid)
    {
      /* Already the active view: do nothing.  */
      return;
    }

  /* We have a change of active view.  */
  parent_reg->m_active_view_rid = this_rid;

  if (old_active_view_rid.null_p ())
    {
      /* No previous active view, but the parent and its other children
	 might have values.
	 If so, invalidate those values - but not that of the new view.  */
      region_id_set below_region (&model);
      model.get_descendents (m_parent_rid, &below_region, this_rid);
      for (unsigned i = 0; i < model.get_num_regions (); i++)
	{
	  region_id rid (region_id::from_int (i));
	  if (below_region.region_p (rid))
	    {
	      region *other_reg = model.get_region (rid);
	      other_reg->m_sval_id = svalue_id::null ();
	    }
	}
      /* The parent's own value becomes unknown.  */
      region *parent = model.get_region (m_parent_rid);
      parent->m_sval_id
	= model.add_svalue (new unknown_svalue (parent->get_type ()));
    }
  else
    {
      /* If there was an active view, invalidate it.  */
      region *old_active_view = model.get_region (old_active_view_rid);
      old_active_view->deactivate_view (model, old_active_view_rid);
    }
}
1102
1103 /* If this region (with id THIS_RID) has an active view, deactivate it,
1104 clearing m_active_view_rid. */
1105
1106 void
1107 region::deactivate_any_active_view (region_model &model)
1108 {
1109 if (m_active_view_rid.null_p ())
1110 return;
1111 region *view = model.get_region (m_active_view_rid);
1112 view->deactivate_view (model, m_active_view_rid);
1113 m_active_view_rid = region_id::null ();
1114 }
1115
/* Clear any values for regions below THIS_VIEW_RID.
   Set the view's value to unknown.
   Only valid on view regions.  */

void
region::deactivate_view (region_model &model, region_id this_view_rid)
{
  gcc_assert (is_view_p ());

  /* Purge values from old_active_this_view_rid and all its
     descendents.  Potentially we could use a poison value
     for this, but let's use unknown for now.  */
  region_id_set below_view (&model);
  model.get_descendents (this_view_rid, &below_view, region_id::null ());

  for (unsigned i = 0; i < model.get_num_regions (); i++)
    {
      region_id rid (region_id::from_int (i));
      if (below_view.region_p (rid))
	{
	  region *other_reg = model.get_region (rid);
	  other_reg->m_sval_id = svalue_id::null ();
	}
    }

  /* The view itself gets a fresh unknown value.  */
  m_sval_id = model.add_svalue (new unknown_svalue (get_type ()));
}
1142
/* Get a value for this region, either its value if it has one,
   or, failing that, "inherit" a value from first ancestor with a
   non-null value.

   For example, when getting the value for a local variable within
   a stack frame that doesn't have one, the frame doesn't have a value
   either, but the stack as a whole will have an "uninitialized" poison
   value, so inherit that.

   If NON_NULL, never return the null id: generate (and store) a fresh
   unknown value if nothing can be inherited.  */

svalue_id
region::get_value (region_model &model, bool non_null,
		   region_model_context *ctxt)
{
  /* If this region has a value, use it.  */
  if (!m_sval_id.null_p ())
    return m_sval_id;

  /* Otherwise, "inherit" value from first ancestor with a
     non-null value.  */

  region *parent = model.get_region (m_parent_rid);
  if (parent)
    {
      svalue_id inherited_sid
	= parent->get_inherited_child_sid (this, model, ctxt);
      if (!inherited_sid.null_p ())
	return inherited_sid;
    }

  /* If a non-null value has been requested, then generate
     a new unknown value.  Store it, so that repeated reads from this
     region will yield the same unknown value.  */
  if (non_null)
    {
      svalue_id unknown_sid = model.add_svalue (new unknown_svalue (m_type));
      m_sval_id = unknown_sid;
      return unknown_sid;
    }

  return svalue_id::null ();
}
1184
/* Get a value for CHILD, inheriting from this region.

   Recurse, so this region will inherit a value if it doesn't already
   have one.

   Note the side-effects: the inherited value is cached on this region
   (m_sval_id) and the child-specific clone is cached on CHILD, so later
   reads do not re-derive them.  */

svalue_id
region::get_inherited_child_sid (region *child,
				 region_model &model,
				 region_model_context *ctxt)
{
  if (m_sval_id.null_p ())
    {
      /* Recurse: pull a value down from our own ancestors first.  */
      if (!m_parent_rid.null_p ())
	{
	  region *parent = model.get_region (m_parent_rid);
	  m_sval_id = parent->get_inherited_child_sid (this, model, ctxt);
	}
    }

  if (!m_sval_id.null_p ())
    {
      /* Clone the parent's value, so that attempts to update it
	 (e.g giving a specific value to an inherited "uninitialized"
	 value) touch the child, and not the parent.  */
      svalue *this_value = model.get_svalue (m_sval_id);
      svalue_id new_child_sid
	= this_value->get_child_sid (this, child, model, ctxt);
      if (ctxt)
	ctxt->on_inherited_svalue (m_sval_id, new_child_sid);
      child->m_sval_id = new_child_sid;
      return new_child_sid;
    }

  return svalue_id::null ();
}
1221
1222 /* Generate a hash value for this region. The work is done by the
1223 add_to_hash vfunc. */
1224
1225 hashval_t
1226 region::hash () const
1227 {
1228 inchash::hash hstate;
1229 add_to_hash (hstate);
1230 return hstate.end ();
1231 }
1232
1233 /* Print a one-liner representation of this region to PP, assuming
1234 that this region is within MODEL and its id is THIS_RID. */
1235
1236 void
1237 region::print (const region_model &model,
1238 region_id this_rid,
1239 pretty_printer *pp) const
1240 {
1241 this_rid.print (pp);
1242 pp_string (pp, ": {");
1243
1244 /* vfunc. */
1245 print_fields (model, this_rid, pp);
1246
1247 pp_string (pp, "}");
1248 }
1249
/* Base class implementation of region::dump_dot_to_pp vfunc.
   Emit a GraphViz node for this region (labelled with its one-line
   "print" output), plus edges to its value and parent, if any.  */

void
region::dump_dot_to_pp (const region_model &model,
			region_id this_rid,
			pretty_printer *pp) const
{
  /* Emit the node, escaping the printed label for use in dot.  */
  this_rid.dump_node_name_to_pp (pp);
  pp_printf (pp, " [shape=none,margin=0,style=filled,fillcolor=%s,label=\"",
	     "lightgrey");
  pp_write_text_to_stream (pp);
  print (model, this_rid, pp);
  pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
  pp_string (pp, "\"];");
  pp_newline (pp);

  /* Add edge to svalue.  */
  if (!m_sval_id.null_p ())
    {
      this_rid.dump_node_name_to_pp (pp);
      pp_string (pp, " -> ");
      m_sval_id.dump_node_name_to_pp (pp);
      pp_string (pp, ";");
      pp_newline (pp);
    }

  /* Add edge to parent.  */
  if (!m_parent_rid.null_p ())
    {
      this_rid.dump_node_name_to_pp (pp);
      pp_string (pp, " -> ");
      m_parent_rid.dump_node_name_to_pp (pp);
      pp_string (pp, ";");
      pp_newline (pp);
    }
}
1286
1287 /* Dump a tree-like ASCII-art representation of this region to PP. */
1288
1289 void
1290 region::dump_to_pp (const region_model &model,
1291 region_id this_rid,
1292 pretty_printer *pp,
1293 const char *prefix,
1294 bool is_last_child) const
1295 {
1296 print (model, this_rid, pp);
1297 pp_newline (pp);
1298
1299 const char *new_prefix;
1300 if (!m_parent_rid.null_p ())
1301 new_prefix = ACONCAT ((prefix, is_last_child ? " " : "| ", NULL));
1302 else
1303 new_prefix = prefix;
1304
1305 const char *begin_color = colorize_start (pp_show_color (pp), "note");
1306 const char *end_color = colorize_stop (pp_show_color (pp));
1307 char *field_prefix
1308 = ACONCAT ((begin_color, new_prefix, "|:", end_color, NULL));
1309
1310 if (!m_sval_id.null_p ())
1311 {
1312 pp_printf (pp, "%s sval: ", field_prefix);
1313 model.get_svalue (m_sval_id)->print (model, m_sval_id, pp);
1314 pp_newline (pp);
1315 }
1316 if (m_type)
1317 {
1318 pp_printf (pp, "%s type: ", field_prefix);
1319 print_quoted_type (pp, m_type);
1320 pp_newline (pp);
1321 }
1322
1323 /* Find the children. */
1324
1325 auto_vec<region_id> child_rids;
1326 unsigned i;
1327 for (unsigned i = 0; i < model.get_num_regions (); ++i)
1328 {
1329 region_id rid = region_id::from_int (i);
1330 region *child = model.get_region (rid);
1331 if (child->m_parent_rid == this_rid)
1332 child_rids.safe_push (rid);
1333 }
1334
1335 /* Print the children, using dump_child_label to label them. */
1336
1337 region_id *child_rid;
1338 FOR_EACH_VEC_ELT (child_rids, i, child_rid)
1339 {
1340 is_last_child = (i == child_rids.length () - 1);
1341 if (!this_rid.null_p ())
1342 {
1343 const char *tail = is_last_child ? "`-" : "|-";
1344 pp_printf (pp, "%r%s%s%R", "note", new_prefix, tail);
1345 }
1346 dump_child_label (model, this_rid, *child_rid, pp);
1347 model.get_region (*child_rid)->dump_to_pp (model, *child_rid, pp,
1348 new_prefix,
1349 is_last_child);
1350 }
1351 }
1352
/* Base implementation of region::dump_child_label vfunc.
   For children that are views, print whether the view is the active
   one and the type it views the parent as; non-view children get no
   label from the base class.  */

void
region::dump_child_label (const region_model &model,
			  region_id this_rid ATTRIBUTE_UNUSED,
			  region_id child_rid,
			  pretty_printer *pp) const
{
  region *child = model.get_region (child_rid);
  if (child->m_is_view)
    {
      /* Views are expected to have a type (unlike some other regions).  */
      gcc_assert (TYPE_P (child->get_type ()));
      if (m_active_view_rid == child_rid)
	pp_string (pp, "active ");
      else
	pp_string (pp, "inactive ");
      pp_string (pp, "view as ");
      print_quoted_type (pp, child->get_type ());
      pp_string (pp, ": ");
    }
}
1374
1375 /* Base implementation of region::validate vfunc.
1376 Assert that the fields of "region" are valid; subclasses should
1377 chain up their implementation to this one. */
1378
1379 void
1380 region::validate (const region_model &model) const
1381 {
1382 m_parent_rid.validate (model);
1383 m_sval_id.validate (model);
1384 unsigned i;
1385 region_id *view_rid;
1386 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1387 {
1388 gcc_assert (!view_rid->null_p ());
1389 view_rid->validate (model);
1390 }
1391 m_active_view_rid.validate (model);
1392 }
1393
/* Apply MAP to svalue_ids to this region.  This updates the value
   for the region (if any); a null m_sval_id is left untouched by
   the map's update.  */

void
region::remap_svalue_ids (const svalue_id_map &map)
{
  map.update (&m_sval_id);
}
1402
1403 /* Base implementation of region::remap_region_ids vfunc; subclasses should
1404 chain up to this, updating any region_id data. */
1405
1406 void
1407 region::remap_region_ids (const region_id_map &map)
1408 {
1409 map.update (&m_parent_rid);
1410 unsigned i;
1411 region_id *view_rid;
1412 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1413 map.update (view_rid);
1414 map.update (&m_active_view_rid);
1415 }
1416
1417 /* Add a new region with id VIEW_RID as a view of this region. */
1418
1419 void
1420 region::add_view (region_id view_rid, region_model *model)
1421 {
1422 gcc_assert (!view_rid.null_p ());
1423 region *new_view = model->get_region (view_rid);
1424 new_view->m_is_view = true;
1425 gcc_assert (!new_view->m_parent_rid.null_p ());
1426 gcc_assert (new_view->m_sval_id.null_p ());
1427
1428 //gcc_assert (new_view->get_type () != NULL_TREE);
1429 // TODO: this can sometimes be NULL, when viewing through a (void *)
1430
1431 // TODO: the type ought to not be present yet
1432
1433 m_view_rids.safe_push (view_rid);
1434 }
1435
1436 /* Look for a view of type TYPE of this region, returning its id if found,
1437 or null otherwise. */
1438
1439 region_id
1440 region::get_view (tree type, region_model *model) const
1441 {
1442 unsigned i;
1443 region_id *view_rid;
1444 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1445 {
1446 region *view = model->get_region (*view_rid);
1447 gcc_assert (view->m_is_view);
1448 if (view->get_type () == type)
1449 return *view_rid;
1450 }
1451 return region_id::null ();
1452 }
1453
/* region's ctor.  TYPE may be NULL_TREE (some regions have no known
   type); when non-null it must be an actual type node.  */

region::region (region_id parent_rid, svalue_id sval_id, tree type)
: m_parent_rid (parent_rid), m_sval_id (sval_id), m_type (type),
  m_view_rids (), m_is_view (false), m_active_view_rid (region_id::null ())
{
  gcc_assert (type == NULL_TREE || TYPE_P (type));
}
1462
/* region's copy ctor.
   m_view_rids is pre-sized to the source's length in the init-list,
   so the quick_push calls below cannot overflow its capacity.  */

region::region (const region &other)
: m_parent_rid (other.m_parent_rid), m_sval_id (other.m_sval_id),
  m_type (other.m_type), m_view_rids (other.m_view_rids.length ()),
  m_is_view (other.m_is_view), m_active_view_rid (other.m_active_view_rid)
{
  int i;
  region_id *rid;
  FOR_EACH_VEC_ELT (other.m_view_rids, i, rid)
    m_view_rids.quick_push (*rid);
}
1475
/* Base implementation of region::add_to_hash vfunc; subclasses should
   chain up to this.  */

void
region::add_to_hash (inchash::hash &hstate) const
{
  inchash::add (m_parent_rid, hstate);
  inchash::add (m_sval_id, hstate);
  /* The type is hashed by pointer identity.  */
  hstate.add_ptr (m_type);
  // TODO: views
}
1487
1488 /* Base implementation of region::print_fields vfunc. */
1489
1490 void
1491 region::print_fields (const region_model &model ATTRIBUTE_UNUSED,
1492 region_id this_rid ATTRIBUTE_UNUSED,
1493 pretty_printer *pp) const
1494 {
1495 pp_printf (pp, "kind: %qs", region_kind_to_str (get_kind ()));
1496
1497 pp_string (pp, ", parent: ");
1498 m_parent_rid.print (pp);
1499
1500 pp_printf (pp, ", sval: ");
1501 m_sval_id.print (pp);
1502
1503 if (m_type)
1504 {
1505 pp_printf (pp, ", type: ");
1506 print_quoted_type (pp, m_type);
1507 }
1508 }
1509
1510 /* Determine if a pointer to this region must be non-NULL.
1511
1512 Generally, pointers to regions must be non-NULL, but pointers
1513 to symbolic_regions might, in fact, be NULL.
1514
1515 This allows us to simulate functions like malloc and calloc with:
1516 - only one "outcome" from each statement,
1517 - the idea that the pointer is on the heap if non-NULL
1518 - the possibility that the pointer could be NULL
1519 - the idea that successive values returned from malloc are non-equal
1520 - to be able to zero-fill for calloc. */
1521
1522 bool
1523 region::non_null_p (const region_model &model) const
1524 {
1525 /* Look through views to get at the underlying region. */
1526 if (is_view_p ())
1527 return model.get_region (m_parent_rid)->non_null_p (model);
1528
1529 /* Are we within a symbolic_region? If so, it could be NULL. */
1530 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
1531 {
1532 if (sym_reg->m_possibly_null)
1533 return false;
1534 }
1535
1536 return true;
1537 }
1538
/* class primitive_region : public region.  */

/* Implementation of region::clone vfunc for primitive_region.
   Returns a heap-allocated copy; the caller takes ownership.  */

region *
primitive_region::clone () const
{
  return new primitive_region (*this);
}
1548
/* Implementation of region::walk_for_canonicalization vfunc for
   primitive_region.  Primitive regions have no child regions, so
   there is nothing to walk.  */

void
primitive_region::walk_for_canonicalization (canonicalization *) const
{
  /* Empty.  */
}
1557
/* class map_region : public region.  */

/* map_region's copy ctor.  The key-to-region_id map is copied along
   with the base-class state.  */

map_region::map_region (const map_region &other)
: region (other),
  m_map (other.m_map)
{
}
1567
1568 /* Compare the fields of this map_region with OTHER, returning true
1569 if they are equal.
1570 For use by region::operator==. */
1571
1572 bool
1573 map_region::compare_fields (const map_region &other) const
1574 {
1575 if (m_map.elements () != other.m_map.elements ())
1576 return false;
1577
1578 for (map_t::iterator iter = m_map.begin ();
1579 iter != m_map.end ();
1580 ++iter)
1581 {
1582 tree key = (*iter).first;
1583 region_id e = (*iter).second;
1584 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key);
1585 if (other_slot == NULL)
1586 return false;
1587 if (e != *other_slot)
1588 return false;
1589 }
1590 return true;
1591 }
1592
/* Implementation of region::print_fields vfunc for map_region.
   Chains up to the base class, then appends a comma-separated
   "map: {KEY: RID, ...}" listing of the bindings.  */

void
map_region::print_fields (const region_model &model,
			  region_id this_rid,
			  pretty_printer *pp) const
{
  region::print_fields (model, this_rid, pp);
  pp_string (pp, ", map: {");
  for (map_t::iterator iter = m_map.begin ();
       iter != m_map.end ();
       ++iter)
    {
      if (iter != m_map.begin ())
	pp_string (pp, ", ");
      tree expr = (*iter).first;
      region_id child_rid = (*iter).second;
      dump_quoted_tree (pp, expr);
      pp_string (pp, ": ");
      child_rid.print (pp);
    }
  pp_string (pp, "}");
}
1616
1617 /* Implementation of region::validate vfunc for map_region. */
1618
1619 void
1620 map_region::validate (const region_model &model) const
1621 {
1622 region::validate (model);
1623 for (map_t::iterator iter = m_map.begin ();
1624 iter != m_map.end ();
1625 ++iter)
1626 {
1627 region_id child_rid = (*iter).second;
1628 child_rid.validate (model);
1629 }
1630 }
1631
/* Implementation of region::dump_dot_to_pp vfunc for map_region.
   Chains up, then for each binding emits a label node showing the
   key expression, with an edge to the bound child region.  */

void
map_region::dump_dot_to_pp (const region_model &model,
			    region_id this_rid,
			    pretty_printer *pp) const
{
  region::dump_dot_to_pp (model, this_rid, pp);
  for (map_t::iterator iter = m_map.begin ();
       iter != m_map.end ();
       ++iter)
    {
      // TODO: add nodes/edges to label things

      tree expr = (*iter).first;
      region_id child_rid = (*iter).second;

      /* Label node named after the child's id, escaped for dot.  */
      pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ());
      pp_write_text_to_stream (pp);
      pp_printf (pp, "%qE", expr);
      pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
      pp_string (pp, "\"];");
      pp_newline (pp);

      /* Edge from the label node to the child region's node.  */
      pp_printf (pp, "rid_label_%i", child_rid.as_int ());
      pp_string (pp, " -> ");
      child_rid.dump_node_name_to_pp (pp);
      pp_string (pp, ";");
      pp_newline (pp);
    }
}
1663
/* Implementation of region::dump_child_label vfunc for map_region.
   Chains up, then prints the quoted key(s) bound to CHILD_RID.  */

void
map_region::dump_child_label (const region_model &model,
			      region_id this_rid,
			      region_id child_rid,
			      pretty_printer *pp) const
{
  region::dump_child_label (model, this_rid, child_rid, pp);

  /* Linear scan for the key(s) mapping to CHILD_RID.  */
  for (map_t::iterator iter = m_map.begin ();
       iter != m_map.end ();
       ++iter)
    {
      if (child_rid == (*iter).second)
	{
	  tree key = (*iter).first;
	  dump_quoted_tree (pp, key);
	  pp_string (pp, ": ");
	}
    }
}
1686
1687 /* Look for a child region for KEY within this map_region.
1688 If it doesn't already exist, create a child map_region, using TYPE for
1689 its type.
1690 Return the region_id of the child (whether pre-existing, or
1691 newly-created).
1692 Notify CTXT if we don't know how to handle TYPE. */
1693
1694 region_id
1695 map_region::get_or_create (region_model *model,
1696 region_id this_rid,
1697 tree key,
1698 tree type,
1699 region_model_context *ctxt)
1700 {
1701 gcc_assert (key);
1702 gcc_assert (valid_key_p (key));
1703 region_id *slot = m_map.get (key);
1704 if (slot)
1705 return *slot;
1706 region_id child_rid = model->add_region_for_type (this_rid, type, ctxt);
1707 m_map.put (key, child_rid);
1708 return child_rid;
1709 }
1710
1711 /* Get the region_id for the child region for KEY within this
1712 MAP_REGION, or NULL if there is no such child region. */
1713
1714 region_id *
1715 map_region::get (tree key)
1716 {
1717 gcc_assert (key);
1718 gcc_assert (valid_key_p (key));
1719 region_id *slot = m_map.get (key);
1720 return slot;
1721 }
1722
/* Implementation of region::add_to_hash vfunc for map_region.
   Currently only hashes the base-class state; the map contents are
   not yet folded in.  */

void
map_region::add_to_hash (inchash::hash &hstate) const
{
  region::add_to_hash (hstate);
  // TODO
}
1731
1732 /* Implementation of region::remap_region_ids vfunc for map_region. */
1733
1734 void
1735 map_region::remap_region_ids (const region_id_map &map)
1736 {
1737 region::remap_region_ids (map);
1738
1739 /* Remap the region ids within the map entries. */
1740 for (map_t::iterator iter = m_map.begin ();
1741 iter != m_map.end (); ++iter)
1742 map.update (&(*iter).second);
1743 }
1744
/* Remove the binding of KEY to its child region (but not the
   child region itself).
   For use when purging unneeded SSA names.  */

void
map_region::unbind (tree key)
{
  gcc_assert (key);
  gcc_assert (valid_key_p (key));
  m_map.remove (key);
}
1756
1757 /* Look for a child region with id CHILD_RID within this map_region.
1758 If one is found, return its tree key, otherwise return NULL_TREE. */
1759
1760 tree
1761 map_region::get_tree_for_child_region (region_id child_rid) const
1762 {
1763 // TODO: do we want to store an inverse map?
1764 for (map_t::iterator iter = m_map.begin ();
1765 iter != m_map.end ();
1766 ++iter)
1767 {
1768 tree key = (*iter).first;
1769 region_id r = (*iter).second;
1770 if (r == child_rid)
1771 return key;
1772 }
1773
1774 return NULL_TREE;
1775 }
1776
1777 /* Look for a child region CHILD within this map_region.
1778 If one is found, return its tree key, otherwise return NULL_TREE. */
1779
1780 tree
1781 map_region::get_tree_for_child_region (region *child,
1782 const region_model &model) const
1783 {
1784 // TODO: do we want to store an inverse map?
1785 for (map_t::iterator iter = m_map.begin ();
1786 iter != m_map.end ();
1787 ++iter)
1788 {
1789 tree key = (*iter).first;
1790 region_id r = (*iter).second;
1791 if (model.get_region (r) == child)
1792 return key;
1793 }
1794
1795 return NULL_TREE;
1796 }
1797
/* Comparator for trees to impose a deterministic ordering on
   T1 and T2.
   Returns negative/zero/positive in the usual three-way sense.
   Only decls, SSA_NAMEs and the constant codes handled in the switch
   below are supported; anything else hits gcc_unreachable.  */

static int
tree_cmp (const_tree t1, const_tree t2)
{
  gcc_assert (t1);
  gcc_assert (t2);

  /* Test tree codes first.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return TREE_CODE (t1) - TREE_CODE (t2);

  /* From this point on, we know T1 and T2 have the same tree code.  */

  if (DECL_P (t1))
    {
      /* Order named decls by name; unnamed decls sort after named
	 ones, tie-broken by UID.  */
      if (DECL_NAME (t1) && DECL_NAME (t2))
	return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
		       IDENTIFIER_POINTER (DECL_NAME (t2)));
      else
	{
	  if (DECL_NAME (t1))
	    return -1;
	  else if (DECL_NAME (t2))
	    return 1;
	  else
	    return DECL_UID (t1) - DECL_UID (t2);
	}
    }

  switch (TREE_CODE (t1))
    {
    case SSA_NAME:
      {
	/* Order by underlying var (recursively), then by version;
	   names without vars sort after those with vars.  */
	if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
	  {
	    int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
	    if (var_cmp)
	      return var_cmp;
	    return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
	else
	  {
	    if (SSA_NAME_VAR (t1))
	      return -1;
	    else if (SSA_NAME_VAR (t2))
	      return 1;
	    else
	      return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
      }
      break;

    case INTEGER_CST:
      return tree_int_cst_compare (t1, t2);

    case REAL_CST:
      {
	const real_value *rv1 = TREE_REAL_CST_PTR (t1);
	const real_value *rv2 = TREE_REAL_CST_PTR (t2);
	if (real_compare (UNORDERED_EXPR, rv1, rv2))
	  {
	    /* Impose an arbitrary order on NaNs relative to other NaNs
	       and to non-NaNs.  */
	    if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
	      return cmp_isnan;
	    if (int cmp_issignaling_nan
		  = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
	      return cmp_issignaling_nan;
	    return real_isneg (rv1) - real_isneg (rv2);
	  }
	if (real_compare (LT_EXPR, rv1, rv2))
	  return -1;
	if (real_compare (GT_EXPR, rv1, rv2))
	  return 1;
	return 0;
      }

    case STRING_CST:
      return strcmp (TREE_STRING_POINTER (t1),
		     TREE_STRING_POINTER (t2));

    default:
      gcc_unreachable ();
      break;
    }

  gcc_unreachable ();

  return 0;
}
1890
1891 /* qsort comparator for trees to impose a deterministic ordering on
1892 P1 and P2. */
1893
1894 static int
1895 tree_cmp (const void *p1, const void *p2)
1896 {
1897 const_tree t1 = *(const_tree const *)p1;
1898 const_tree t2 = *(const_tree const *)p2;
1899
1900 return tree_cmp (t1, t2);
1901 }
1902
1903 /* Attempt to merge MAP_REGION_A and MAP_REGION_B into MERGED_MAP_REGION,
1904 which has region_id MERGED_RID, using MERGER.
1905 Return true if the merger is possible, false otherwise. */
1906
1907 bool
1908 map_region::can_merge_p (const map_region *map_region_a,
1909 const map_region *map_region_b,
1910 map_region *merged_map_region,
1911 region_id merged_rid,
1912 model_merger *merger)
1913 {
1914 for (map_t::iterator iter = map_region_a->m_map.begin ();
1915 iter != map_region_a->m_map.end ();
1916 ++iter)
1917 {
1918 tree key_a = (*iter).first;
1919 region_id rid_a = (*iter).second;
1920
1921 if (const region_id *slot_b
1922 = const_cast<map_region *>(map_region_b)->m_map.get (key_a))
1923 {
1924 region_id rid_b = *slot_b;
1925
1926 region *child_region_a = merger->get_region_a <region> (rid_a);
1927 region *child_region_b = merger->get_region_b <region> (rid_b);
1928
1929 gcc_assert (child_region_a->get_type ()
1930 == child_region_b->get_type ());
1931
1932 gcc_assert (child_region_a->get_kind ()
1933 == child_region_b->get_kind ());
1934
1935 region_id child_merged_rid
1936 = merged_map_region->get_or_create (merger->m_merged_model,
1937 merged_rid,
1938 key_a,
1939 child_region_a->get_type (),
1940 NULL);
1941
1942 region *child_merged_region
1943 = merger->m_merged_model->get_region (child_merged_rid);
1944
1945 /* Consider values. */
1946 svalue_id child_a_sid = child_region_a->get_value_direct ();
1947 svalue_id child_b_sid = child_region_b->get_value_direct ();
1948 svalue_id child_merged_sid;
1949 if (!merger->can_merge_values_p (child_a_sid, child_b_sid,
1950 &child_merged_sid))
1951 return false;
1952 if (!child_merged_sid.null_p ())
1953 child_merged_region->set_value (*merger->m_merged_model,
1954 child_merged_rid,
1955 child_merged_sid,
1956 NULL);
1957
1958 if (map_region *map_region_a = child_region_a->dyn_cast_map_region ())
1959 {
1960 /* Recurse. */
1961 if (!can_merge_p (map_region_a,
1962 as_a <map_region *> (child_region_b),
1963 as_a <map_region *> (child_merged_region),
1964 child_merged_rid,
1965 merger))
1966 return false;
1967 }
1968
1969 }
1970 else
1971 {
1972 /* TODO: region is present in A, but absent in B. */
1973 }
1974 }
1975
1976 /* TODO: check for keys in B that aren't in A. */
1977
1978 return true;
1979 }
1980
1981
1982 /* Implementation of region::walk_for_canonicalization vfunc for
1983 map_region. */
1984
1985 void
1986 map_region::walk_for_canonicalization (canonicalization *c) const
1987 {
1988 auto_vec<tree> keys (m_map.elements ());
1989 for (map_t::iterator iter = m_map.begin ();
1990 iter != m_map.end ();
1991 ++iter)
1992 {
1993 tree key_a = (*iter).first;
1994 keys.quick_push (key_a);
1995 }
1996 keys.qsort (tree_cmp);
1997
1998 unsigned i;
1999 tree key;
2000 FOR_EACH_VEC_ELT (keys, i, key)
2001 {
2002 region_id rid = *const_cast<map_region *>(this)->m_map.get (key);
2003 c->walk_rid (rid);
2004 }
2005 }
2006
2007 /* For debugging purposes: look for a child region for a decl named
2008 IDENTIFIER (or an SSA_NAME for such a decl), returning its value,
2009 or svalue_id::null if none are found. */
2010
2011 svalue_id
2012 map_region::get_value_by_name (tree identifier,
2013 const region_model &model) const
2014 {
2015 for (map_t::iterator iter = m_map.begin ();
2016 iter != m_map.end ();
2017 ++iter)
2018 {
2019 tree key = (*iter).first;
2020 if (TREE_CODE (key) == SSA_NAME)
2021 if (SSA_NAME_VAR (key))
2022 key = SSA_NAME_VAR (key);
2023 if (DECL_P (key))
2024 if (DECL_NAME (key) == identifier)
2025 {
2026 region_id rid = (*iter).second;
2027 region *region = model.get_region (rid);
2028 return region->get_value (const_cast<region_model &>(model),
2029 false, NULL);
2030 }
2031 }
2032 return svalue_id::null ();
2033 }
2034
/* class struct_or_union_region : public map_region.  */

/* Implementation of map_region::valid_key_p vfunc for
   struct_or_union_region: only FIELD_DECLs may be used as keys.  */

bool
struct_or_union_region::valid_key_p (tree key) const
{
  return TREE_CODE (key) == FIELD_DECL;
}
2045
/* Compare the fields of this struct_or_union_region with OTHER, returning
   true if they are equal.
   For use by region::operator==.
   No extra fields beyond the base class, so just chain up.  */

bool
struct_or_union_region::compare_fields (const struct_or_union_region &other)
  const
{
  return map_region::compare_fields (other);
}
2056
/* class struct_region : public struct_or_union_region.  */

/* Implementation of region::clone vfunc for struct_region.
   Returns a heap-allocated copy; the caller takes ownership.  */

region *
struct_region::clone () const
{
  return new struct_region (*this);
}
2066
/* Compare the fields of this struct_region with OTHER, returning true
   if they are equal.
   For use by region::operator==.
   No extra fields beyond the base class, so just chain up.  */

bool
struct_region::compare_fields (const struct_region &other) const
{
  return struct_or_union_region::compare_fields (other);
}
2076
/* class union_region : public struct_or_union_region.  */

/* Implementation of region::clone vfunc for union_region.
   Returns a heap-allocated copy; the caller takes ownership.  */

region *
union_region::clone () const
{
  return new union_region (*this);
}
2086
/* Compare the fields of this union_region with OTHER, returning true
   if they are equal.
   For use by region::operator==.
   No extra fields beyond the base class, so just chain up.  */

bool
union_region::compare_fields (const union_region &other) const
{
  return struct_or_union_region::compare_fields (other);
}
2096
2097 /* class frame_region : public map_region. */
2098
2099 /* Compare the fields of this frame_region with OTHER, returning true
2100 if they are equal.
2101 For use by region::operator==. */
2102
2103 bool
2104 frame_region::compare_fields (const frame_region &other) const
2105 {
2106 if (!map_region::compare_fields (other))
2107 return false;
2108 if (m_fun != other.m_fun)
2109 return false;
2110 if (m_depth != other.m_depth)
2111 return false;
2112 return true;
2113 }
2114
/* Implementation of region::clone vfunc for frame_region.
   Returns a heap-allocated copy; the caller takes ownership.  */

region *
frame_region::clone () const
{
  return new frame_region (*this);
}
2122
2123 /* Implementation of map_region::valid_key_p vfunc for frame_region. */
2124
2125 bool
2126 frame_region::valid_key_p (tree key) const
2127 {
2128 // TODO: could also check that VAR_DECLs are locals
2129 return (TREE_CODE (key) == PARM_DECL
2130 || TREE_CODE (key) == VAR_DECL
2131 || TREE_CODE (key) == SSA_NAME
2132 || TREE_CODE (key) == RESULT_DECL);
2133 }
2134
/* Implementation of region::print_fields vfunc for frame_region.
   Chains up, then appends the function name and stack depth.  */

void
frame_region::print_fields (const region_model &model,
			    region_id this_rid,
			    pretty_printer *pp) const
{
  map_region::print_fields (model, this_rid, pp);
  pp_printf (pp, ", function: %qs, depth: %i", function_name (m_fun), m_depth);
}
2145
/* Implementation of region::add_to_hash vfunc for frame_region.
   Chains up, then folds in the function (by pointer) and the depth.  */

void
frame_region::add_to_hash (inchash::hash &hstate) const
{
  map_region::add_to_hash (hstate);
  hstate.add_ptr (m_fun);
  hstate.add_int (m_depth);
}
2155
/* class globals_region : public scope_region.  */

/* Compare the fields of this globals_region with OTHER, returning true
   if they are equal.
   For use by region::operator==.
   No extra fields beyond map_region, so just chain up.  */

bool
globals_region::compare_fields (const globals_region &other) const
{
  return map_region::compare_fields (other);
}
2167
/* Implementation of region::clone vfunc for globals_region.
   Returns a heap-allocated copy; the caller takes ownership.  */

region *
globals_region::clone () const
{
  return new globals_region (*this);
}
2175
/* Implementation of map_region::valid_key_p vfunc for globals_region:
   only VAR_DECLs (global variables) may be used as keys.  */

bool
globals_region::valid_key_p (tree key) const
{
  return TREE_CODE (key) == VAR_DECL;
}
2183
/* class code_region : public map_region.  */

/* Compare the fields of this code_region with OTHER, returning true
   if they are equal.
   For use by region::operator==.
   No extra fields beyond map_region, so just chain up.  */

bool
code_region::compare_fields (const code_region &other) const
{
  return map_region::compare_fields (other);
}
2195
/* Implementation of region::clone vfunc for code_region.
   Returns a heap-allocated copy; the caller takes ownership.  */

region *
code_region::clone () const
{
  return new code_region (*this);
}
2203
/* Implementation of map_region::valid_key_p vfunc for code_region:
   only FUNCTION_DECLs may be used as keys.  */

bool
code_region::valid_key_p (tree key) const
{
  return TREE_CODE (key) == FUNCTION_DECL;
}
2211
/* class array_region : public region.  */

/* array_region's copy ctor.  The index-to-region_id map is copied
   along with the base-class state.  */

array_region::array_region (const array_region &other)
: region (other),
  m_map (other.m_map)
{
}
2221
2222 /* Get a child region for the element with index INDEX_SID. */
2223
2224 region_id
2225 array_region::get_element (region_model *model,
2226 region_id this_rid,
2227 svalue_id index_sid,
2228 region_model_context *ctxt)
2229 {
2230 tree element_type = TREE_TYPE (get_type ());
2231 svalue *index_sval = model->get_svalue (index_sid);
2232 if (tree cst_index = index_sval->maybe_get_constant ())
2233 {
2234 key_t key = key_from_constant (cst_index);
2235 region_id element_rid
2236 = get_or_create (model, this_rid, key, element_type, ctxt);
2237 return element_rid;
2238 }
2239
2240 return model->get_or_create_view (this_rid, element_type, ctxt);
2241 }
2242
/* Implementation of region::clone vfunc for array_region.
   Returns a heap-allocated copy; the caller takes ownership.  */

region *
array_region::clone () const
{
  return new array_region (*this);
}
2250
2251 /* Compare the fields of this array_region with OTHER, returning true
2252 if they are equal.
2253 For use by region::operator==. */
2254
2255 bool
2256 array_region::compare_fields (const array_region &other) const
2257 {
2258 if (m_map.elements () != other.m_map.elements ())
2259 return false;
2260
2261 for (map_t::iterator iter = m_map.begin ();
2262 iter != m_map.end ();
2263 ++iter)
2264 {
2265 int key = (*iter).first;
2266 region_id e = (*iter).second;
2267 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key);
2268 if (other_slot == NULL)
2269 return false;
2270 if (e != *other_slot)
2271 return false;
2272 }
2273 return true;
2274 }
2275
/* Implementation of region::print_fields vfunc for array_region.
   Chains up to the base class, then appends a comma-separated
   "array: {[IDX]: RID, ...}" listing of the bindings.  */

void
array_region::print_fields (const region_model &model,
			    region_id this_rid,
			    pretty_printer *pp) const
{
  region::print_fields (model, this_rid, pp);
  pp_string (pp, ", array: {");
  for (map_t::iterator iter = m_map.begin ();
       iter != m_map.end ();
       ++iter)
    {
      if (iter != m_map.begin ())
	pp_string (pp, ", ");
      int key = (*iter).first;
      region_id child_rid = (*iter).second;
      pp_printf (pp, "[%i]: ", key);
      child_rid.print (pp);
    }
  pp_string (pp, "}");
}
2298
2299 /* Implementation of region::validate vfunc for array_region. */
2300
2301 void
2302 array_region::validate (const region_model &model) const
2303 {
2304 region::validate (model);
2305 for (map_t::iterator iter = m_map.begin ();
2306 iter != m_map.end ();
2307 ++iter)
2308 {
2309 region_id child_rid = (*iter).second;
2310 child_rid.validate (model);
2311 }
2312 }
2313
2314 /* Implementation of region::dump_dot_to_pp vfunc for array_region. */
2315
void
array_region::dump_dot_to_pp (const region_model &model,
			      region_id this_rid,
			      pretty_printer *pp) const
{
  /* Emit the base class's dot output first.  */
  region::dump_dot_to_pp (model, this_rid, pp);
  for (map_t::iterator iter = m_map.begin ();
       iter != m_map.end ();
       ++iter)
    {
      // TODO: add nodes/edges to label things

      int key = (*iter).first;
      region_id child_rid = (*iter).second;

      /* Emit a node showing the key, named after the child region's id,
	 escaping the text so it is valid inside a dot label.  */
      pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ());
      pp_write_text_to_stream (pp);
      pp_printf (pp, "%qi", key);
      pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
      pp_string (pp, "\"];");
      pp_newline (pp);

      /* Emit an edge from the key node to the child region's node.  */
      pp_printf (pp, "rid_label_%i", child_rid.as_int ());
      pp_string (pp, " -> ");
      child_rid.dump_node_name_to_pp (pp);
      pp_string (pp, ";");
      pp_newline (pp);
    }
}
2345
2346 /* Implementation of region::dump_child_label vfunc for array_region. */
2347
2348 void
2349 array_region::dump_child_label (const region_model &model,
2350 region_id this_rid,
2351 region_id child_rid,
2352 pretty_printer *pp) const
2353 {
2354 region::dump_child_label (model, this_rid, child_rid, pp);
2355
2356 for (map_t::iterator iter = m_map.begin ();
2357 iter != m_map.end ();
2358 ++iter)
2359 {
2360 if (child_rid == (*iter).second)
2361 {
2362 int key = (*iter).first;
2363 pp_printf (pp, "[%i]: ", key);
2364 }
2365 }
2366 }
2367
2368 /* Look for a child region for KEY within this array_region.
2369 If it doesn't already exist, create a child array_region, using TYPE for
2370 its type.
2371 Return the region_id of the child (whether pre-existing, or
2372 newly-created).
2373 Notify CTXT if we don't know how to handle TYPE. */
2374
2375 region_id
2376 array_region::get_or_create (region_model *model,
2377 region_id this_rid,
2378 key_t key,
2379 tree type,
2380 region_model_context *ctxt)
2381 {
2382 region_id *slot = m_map.get (key);
2383 if (slot)
2384 return *slot;
2385 region_id child_rid = model->add_region_for_type (this_rid, type, ctxt);
2386 m_map.put (key, child_rid);
2387 return child_rid;
2388 }
2389
2390 /* Get the region_id for the child region for KEY within this
2391 ARRAY_REGION, or NULL if there is no such child region. */
2392
2393 region_id *
2394 array_region::get (key_t key)
2395 {
2396 region_id *slot = m_map.get (key);
2397 return slot;
2398 }
2399
2400 /* Implementation of region::add_to_hash vfunc for array_region. */
2401
void
array_region::add_to_hash (inchash::hash &hstate) const
{
  region::add_to_hash (hstate);
  // TODO: the element map is not yet folded into the hash, so two
  // array_regions differing only in their elements currently hash
  // the same (correct, but may increase collisions).
}
2408
2409 /* Implementation of region::remap_region_ids vfunc for array_region. */
2410
2411 void
2412 array_region::remap_region_ids (const region_id_map &map)
2413 {
2414 region::remap_region_ids (map);
2415
2416 /* Remap the region ids within the map entries. */
2417 for (map_t::iterator iter = m_map.begin ();
2418 iter != m_map.end (); ++iter)
2419 map.update (&(*iter).second);
2420 }
2421
2422 /* Look for a child region with id CHILD_RID within this array_region.
2423 If one is found, write its key to *OUT and return true,
2424 otherwise return false. */
2425
2426 bool
2427 array_region::get_key_for_child_region (region_id child_rid, key_t *out) const
2428 {
2429 // TODO: do we want to store an inverse map?
2430 for (map_t::iterator iter = m_map.begin ();
2431 iter != m_map.end ();
2432 ++iter)
2433 {
2434 key_t key = (*iter).first;
2435 region_id r = (*iter).second;
2436 if (r == child_rid)
2437 {
2438 *out = key;
2439 return true;
2440 }
2441 }
2442
2443 return false;
2444 }
2445
2446 /* qsort comparator for array_region's keys. */
2447
2448 int
2449 array_region::key_cmp (const void *p1, const void *p2)
2450 {
2451 key_t i1 = *(const key_t *)p1;
2452 key_t i2 = *(const key_t *)p2;
2453
2454 if (i1 > i2)
2455 return 1;
2456 else if (i1 < i2)
2457 return -1;
2458 else
2459 return 0;
2460 }
2461
2462 /* Implementation of region::walk_for_canonicalization vfunc for
2463 array_region. */
2464
2465 void
2466 array_region::walk_for_canonicalization (canonicalization *c) const
2467 {
2468 auto_vec<int> keys (m_map.elements ());
2469 for (map_t::iterator iter = m_map.begin ();
2470 iter != m_map.end ();
2471 ++iter)
2472 {
2473 int key_a = (*iter).first;
2474 keys.quick_push (key_a);
2475 }
2476 keys.qsort (key_cmp);
2477
2478 unsigned i;
2479 int key;
2480 FOR_EACH_VEC_ELT (keys, i, key)
2481 {
2482 region_id rid = *const_cast<array_region *>(this)->m_map.get (key);
2483 c->walk_rid (rid);
2484 }
2485 }
2486
2487 /* Convert constant CST into an array_region::key_t. */
2488
2489 array_region::key_t
2490 array_region::key_from_constant (tree cst)
2491 {
2492 gcc_assert (CONSTANT_CLASS_P (cst));
2493 wide_int w = wi::to_wide (cst);
2494 key_t result = w.to_shwi ();
2495 return result;
2496 }
2497
2498 /* Convert array_region::key_t KEY into a tree constant. */
2499
tree
array_region::constant_from_key (key_t key)
{
  /* Build an INTEGER_CST of the array's index type with value KEY.  */
  tree array_type = get_type ();
  tree index_type = TYPE_DOMAIN (array_type);
  /* NOTE(review): TYPE_DOMAIN can be NULL_TREE for incomplete array
     types; presumably such arrays never reach here — verify.  */
  return build_int_cst (index_type, key);
}
2507
2508 /* class function_region : public map_region. */
2509
2510 /* Compare the fields of this function_region with OTHER, returning true
2511 if they are equal.
2512 For use by region::operator==. */
2513
bool
function_region::compare_fields (const function_region &other) const
{
  /* function_region adds no fields of its own; defer entirely to the
     base class comparison.  */
  return map_region::compare_fields (other);
}
2519
2520 /* Implementation of region::clone vfunc for function_region. */
2521
2522 region *
2523 function_region::clone () const
2524 {
2525 return new function_region (*this);
2526 }
2527
2528 /* Implementation of map_region::valid_key_p vfunc for function_region. */
2529
bool
function_region::valid_key_p (tree key) const
{
  /* The children of a function_region are keyed by LABEL_DECL;
     reject any other tree code.  */
  return TREE_CODE (key) == LABEL_DECL;
}
2535
2536 /* class stack_region : public region. */
2537
2538 /* stack_region's copy ctor. */
2539
2540 stack_region::stack_region (const stack_region &other)
2541 : region (other),
2542 m_frame_rids (other.m_frame_rids.length ())
2543 {
2544 int i;
2545 region_id *frame_rid;
2546 FOR_EACH_VEC_ELT (other.m_frame_rids, i, frame_rid)
2547 m_frame_rids.quick_push (*frame_rid);
2548 }
2549
2550 /* Compare the fields of this stack_region with OTHER, returning true
2551 if they are equal.
2552 For use by region::operator==. */
2553
2554 bool
2555 stack_region::compare_fields (const stack_region &other) const
2556 {
2557 if (m_frame_rids.length () != other.m_frame_rids.length ())
2558 return false;
2559
2560 int i;
2561 region_id *frame_rid;
2562 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2563 if (m_frame_rids[i] != other.m_frame_rids[i])
2564 return false;
2565
2566 return true;
2567 }
2568
2569 /* Implementation of region::clone vfunc for stack_region. */
2570
2571 region *
2572 stack_region::clone () const
2573 {
2574 return new stack_region (*this);
2575 }
2576
2577 /* Implementation of region::print_fields vfunc for stack_region. */
2578
void
stack_region::print_fields (const region_model &model,
			    region_id this_rid,
			    pretty_printer *pp) const
{
  /* Currently only the base class's fields are printed.  */
  region::print_fields (model, this_rid, pp);
  // TODO: print the frame region ids
}
2587
2588 /* Implementation of region::dump_child_label vfunc for stack_region. */
2589
2590 void
2591 stack_region::dump_child_label (const region_model &model,
2592 region_id this_rid ATTRIBUTE_UNUSED,
2593 region_id child_rid,
2594 pretty_printer *pp) const
2595 {
2596 function *fun = model.get_region<frame_region> (child_rid)->get_function ();
2597 pp_printf (pp, "frame for %qs: ", function_name (fun));
2598 }
2599
2600 /* Implementation of region::validate vfunc for stack_region. */
2601
2602 void
2603 stack_region::validate (const region_model &model) const
2604 {
2605 region::validate (model);
2606 int i;
2607 region_id *frame_rid;
2608 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2609 m_frame_rids[i].validate (model);
2610 }
2611
2612 /* Push FRAME_RID (for a frame_region) onto this stack. */
2613
void
stack_region::push_frame (region_id frame_rid)
{
  /* Frames are stored oldest-first; the top of the stack is the
     last element of the vec (see get_current_frame_id).  */
  m_frame_rids.safe_push (frame_rid);
}
2619
2620 /* Get the region_id of the top-most frame in this stack, if any. */
2621
2622 region_id
2623 stack_region::get_current_frame_id () const
2624 {
2625 if (m_frame_rids.length () > 0)
2626 return m_frame_rids[m_frame_rids.length () - 1];
2627 else
2628 return region_id::null ();
2629 }
2630
2631 /* Pop the topmost frame_region from this stack.
2632
2633 Purge the frame region and all its descendent regions.
2634 Convert any pointers that point into such regions into
2635 POISON_KIND_POPPED_STACK svalues.
2636
2637 Return the ID of any return value from the frame.
2638
2639 If PURGE, then purge all unused svalues, with the exception of any
2640 return value for the frame, which is temporarily
2641 preserved in case no regions reference it, so it can
2642 be written into a region in the caller.
2643
2644 Accumulate stats on purged entities into STATS. */
2645
svalue_id
stack_region::pop_frame (region_model *model, bool purge, purge_stats *stats,
			 region_model_context *ctxt)
{
  gcc_assert (m_frame_rids.length () > 0);

  region_id frame_rid = get_current_frame_id ();
  frame_region *frame = model->get_region<frame_region> (frame_rid);

  /* Evaluate the result, within the callee frame.
     This must happen before the frame is deleted below.  */
  svalue_id result_sid;
  tree fndecl = frame->get_function ()->decl;
  tree result = DECL_RESULT (fndecl);
  if (result && TREE_TYPE (result) != void_type_node)
    result_sid = model->get_rvalue (result, ctxt);

  /* Pop the frame RID.  */
  m_frame_rids.pop ();

  /* Delete the frame and everything in it; pointers into it become
     POISON_KIND_POPPED_STACK.  */
  model->delete_region_and_descendents (frame_rid,
					POISON_KIND_POPPED_STACK,
					stats,
					ctxt ? ctxt->get_logger () : NULL);

  /* Delete unused svalues, but don't delete the return value.  */
  if (purge)
    model->purge_unused_svalues (stats, ctxt, &result_sid);

  /* Check internal consistency after the purge.  */
  model->validate ();

  return result_sid;
}
2678
2679 /* Implementation of region::add_to_hash vfunc for stack_region. */
2680
2681 void
2682 stack_region::add_to_hash (inchash::hash &hstate) const
2683 {
2684 region::add_to_hash (hstate);
2685
2686 int i;
2687 region_id *frame_rid;
2688 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2689 inchash::add (*frame_rid, hstate);
2690 }
2691
2692 /* Implementation of region::remap_region_ids vfunc for stack_region. */
2693
2694 void
2695 stack_region::remap_region_ids (const region_id_map &map)
2696 {
2697 region::remap_region_ids (map);
2698 int i;
2699 region_id *frame_rid;
2700 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2701 map.update (&m_frame_rids[i]);
2702 }
2703
2704 /* Attempt to merge STACK_REGION_A and STACK_REGION_B using MERGER.
2705 Return true if the merger is possible, false otherwise. */
2706
bool
stack_region::can_merge_p (const stack_region *stack_region_a,
			   const stack_region *stack_region_b,
			   model_merger *merger)
{
  /* Merging is only attempted for stacks of equal depth.  */
  if (stack_region_a->get_num_frames ()
      != stack_region_b->get_num_frames ())
    return false;

  region_model *merged_model = merger->m_merged_model;

  region_id rid_merged_stack
    = merged_model->get_root_region ()->ensure_stack_region (merged_model);

  stack_region *merged_stack
    = merged_model->get_region <stack_region> (rid_merged_stack);

  /* First, create all frames in the merged model, without populating them.
     The merging code assumes that all frames in the merged model already exist,
     so we have to do this first to handle the case in which a local in an
     older frame points at a local in a more recent frame.  */
  for (unsigned i = 0; i < stack_region_a->get_num_frames (); i++)
    {
      region_id rid_a = stack_region_a->get_frame_rid (i);
      frame_region *frame_a = merger->get_region_a <frame_region> (rid_a);

      region_id rid_b = stack_region_b->get_frame_rid (i);
      frame_region *frame_b = merger->get_region_b <frame_region> (rid_b);

      /* Frames at the same depth must be for the same function.  */
      if (frame_a->get_function () != frame_b->get_function ())
	return false;

      frame_region *merged_frame = new frame_region (rid_merged_stack,
						     frame_a->get_function (),
						     frame_a->get_depth ());
      region_id rid_merged_frame = merged_model->add_region (merged_frame);
      merged_stack->push_frame (rid_merged_frame);
    }

  /* Now populate the frames we created.  */
  for (unsigned i = 0; i < stack_region_a->get_num_frames (); i++)
    {
      region_id rid_a = stack_region_a->get_frame_rid (i);
      frame_region *frame_a = merger->get_region_a <frame_region> (rid_a);

      region_id rid_b = stack_region_b->get_frame_rid (i);
      frame_region *frame_b = merger->get_region_b <frame_region> (rid_b);

      region_id rid_merged_frame = merged_stack->get_frame_rid (i);
      frame_region *merged_frame
	= merged_model->get_region <frame_region> (rid_merged_frame);
      /* Delegate the per-frame merge to map_region; any failure
	 aborts the whole merge.  */
      if (!map_region::can_merge_p (frame_a, frame_b,
				    merged_frame, rid_merged_frame,
				    merger))
	return false;
    }

  return true;
}
2766
2767 /* Implementation of region::walk_for_canonicalization vfunc for
2768 stack_region. */
2769
2770 void
2771 stack_region::walk_for_canonicalization (canonicalization *c) const
2772 {
2773 int i;
2774 region_id *frame_rid;
2775 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2776 c->walk_rid (*frame_rid);
2777 }
2778
2779 /* For debugging purposes: look for a grandchild region within one of
2780 the child frame regions, where the grandchild is for a decl named
2781 IDENTIFIER (or an SSA_NAME for such a decl):
2782
2783 stack_region
2784 `-frame_region
2785 `-region for decl named IDENTIFIER
2786
2787 returning its value, or svalue_id::null if none are found. */
2788
2789 svalue_id
2790 stack_region::get_value_by_name (tree identifier,
2791 const region_model &model) const
2792 {
2793 int i;
2794 region_id *frame_rid;
2795 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2796 {
2797 frame_region *frame = model.get_region<frame_region> (*frame_rid);
2798 svalue_id sid = frame->get_value_by_name (identifier, model);
2799 if (!sid.null_p ())
2800 return sid;
2801 }
2802
2803 return svalue_id::null ();
2804 }
2805
2806 /* class heap_region : public region. */
2807
2808 /* heap_region's copy ctor. */
2809
heap_region::heap_region (const heap_region &other)
: region (other)
{
  /* heap_region has no fields of its own beyond the base class.  */
}
2814
2815 /* Compare the fields of this heap_region with OTHER, returning true
2816 if they are equal.
2817 For use by region::operator==. */
2818
bool
heap_region::compare_fields (const heap_region &) const
{
  /* Empty: heap_region has no extra fields, so any two are
     field-equal.  */
  return true;
}
2825
2826 /* Implementation of region::clone vfunc for heap_region. */
2827
2828 region *
2829 heap_region::clone () const
2830 {
2831 return new heap_region (*this);
2832 }
2833
2834 /* Implementation of region::walk_for_canonicalization vfunc for
2835 heap_region. */
2836
void
heap_region::walk_for_canonicalization (canonicalization *) const
{
  /* Empty: the heap region itself records no child ids to walk.  */
}
2842
2843 /* class root_region : public region. */
2844
2845 /* root_region's default ctor. */
2846
root_region::root_region ()
: region (region_id::null (),
	  svalue_id::null (),
	  NULL_TREE)
{
  /* The root region has no parent, no value, and no type; its four
     child region ids (stack/globals/code/heap) start out null and
     are created lazily by the ensure_*_region methods.  */
}
2853
2854 /* root_region's copy ctor. */
2855
root_region::root_region (const root_region &other)
: region (other),
  m_stack_rid (other.m_stack_rid),
  m_globals_rid (other.m_globals_rid),
  m_code_rid (other.m_code_rid),
  m_heap_rid (other.m_heap_rid)
{
  /* All state is copied in the initializer list; ids are plain
     values, so a shallow copy suffices.  */
}
2864
2865 /* Compare the fields of this root_region with OTHER, returning true
2866 if they are equal.
2867 For use by region::operator==. */
2868
2869 bool
2870 root_region::compare_fields (const root_region &other) const
2871 {
2872 if (m_stack_rid != other.m_stack_rid)
2873 return false;
2874 if (m_globals_rid != other.m_globals_rid)
2875 return false;
2876 if (m_code_rid != other.m_code_rid)
2877 return false;
2878 if (m_heap_rid != other.m_heap_rid)
2879 return false;
2880 return true;
2881 }
2882
2883 /* Implementation of region::clone vfunc for root_region. */
2884
2885 region *
2886 root_region::clone () const
2887 {
2888 return new root_region (*this);
2889 }
2890
2891 /* Implementation of region::print_fields vfunc for root_region. */
2892
void
root_region::print_fields (const region_model &model,
			   region_id this_rid,
			   pretty_printer *pp) const
{
  /* Currently only the base class's fields are printed.  */
  region::print_fields (model, this_rid, pp);
  // TODO: print the stack/globals/code/heap region ids
}
2901
2902 /* Implementation of region::validate vfunc for root_region. */
2903
2904 void
2905 root_region::validate (const region_model &model) const
2906 {
2907 region::validate (model);
2908 m_stack_rid.validate (model);
2909 m_globals_rid.validate (model);
2910 m_code_rid.validate (model);
2911 m_heap_rid.validate (model);
2912 }
2913
2914 /* Implementation of region::dump_child_label vfunc for root_region. */
2915
2916 void
2917 root_region::dump_child_label (const region_model &model ATTRIBUTE_UNUSED,
2918 region_id this_rid ATTRIBUTE_UNUSED,
2919 region_id child_rid,
2920 pretty_printer *pp) const
2921 {
2922 if (child_rid == m_stack_rid)
2923 pp_printf (pp, "stack: ");
2924 else if (child_rid == m_globals_rid)
2925 pp_printf (pp, "globals: ");
2926 else if (child_rid == m_code_rid)
2927 pp_printf (pp, "code: ");
2928 else if (child_rid == m_heap_rid)
2929 pp_printf (pp, "heap: ");
2930 }
2931
2932 /* Create a new frame_region for a call to FUN and push it onto
2933 the stack.
2934
2935 If ARG_SIDS is non-NULL, use it to populate the parameters
2936 in the new frame.
2937 Otherwise, populate them with unknown values.
2938
2939 Return the region_id of the new frame. */
2940
region_id
root_region::push_frame (region_model *model, function *fun,
			 vec<svalue_id> *arg_sids,
			 region_model_context *ctxt)
{
  gcc_assert (fun);
  /* arg_sids can be NULL.  */

  /* Lazily create the stack if this is the first frame.  */
  ensure_stack_region (model);
  stack_region *stack = model->get_region <stack_region> (m_stack_rid);

  /* The new frame's depth is the current number of frames.  */
  frame_region *region = new frame_region (m_stack_rid, fun,
					   stack->get_num_frames ());
  region_id frame_rid = model->add_region (region);

  // TODO: unify these cases by building a vec of unknown?

  if (arg_sids)
    {
      /* Arguments supplied from a caller frame.  */

      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
	{
	  /* If there's a mismatching declaration, the call stmt might
	     not have enough args.  Handle this case by leaving the
	     rest of the params as uninitialized.  */
	  if (idx >= arg_sids->length ())
	    break;
	  /* Bind the caller-supplied value to the parameter's region.  */
	  svalue_id arg_sid = (*arg_sids)[idx];
	  region_id parm_rid
	    = region->get_or_create (model, frame_rid, iter_parm,
				     TREE_TYPE (iter_parm), ctxt);
	  model->set_value (parm_rid, arg_sid, ctxt);

	  /* Also do it for default SSA name (sharing the same unknown
	     value).  */
	  tree parm_default_ssa = ssa_default_def (fun, iter_parm);
	  if (parm_default_ssa)
	    {
	      region_id defssa_rid
		= region->get_or_create (model, frame_rid, parm_default_ssa,
					 TREE_TYPE (iter_parm), ctxt);
	      model->set_value (defssa_rid, arg_sid, ctxt);
	    }
	}
    }
  else
    {
      /* No known arguments (a top-level call within the analysis).  */

      /* Params have a defined, unknown value; they should not inherit
	 from the poisoned uninit value.  */
      tree fndecl = fun->decl;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm))
	{
	  region_id parm_rid
	    = region->get_or_create (model, frame_rid, iter_parm,
				     TREE_TYPE (iter_parm), ctxt);
	  svalue_id parm_sid
	    = model->set_to_new_unknown_value (parm_rid, TREE_TYPE (iter_parm),
					       ctxt);

	  /* Also do it for default SSA name (sharing the same unknown
	     value).  */
	  tree parm_default_ssa = ssa_default_def (fun, iter_parm);
	  if (parm_default_ssa)
	    {
	      region_id defssa_rid
		= region->get_or_create (model, frame_rid, parm_default_ssa,
					 TREE_TYPE (iter_parm), ctxt);
	      model->get_region (defssa_rid)->set_value (*model, defssa_rid,
							 parm_sid, ctxt);
	    }
	}
    }

  /* The frame only becomes the "current" frame once fully populated.  */
  stack->push_frame (frame_rid);

  return frame_rid;
}
3025
3026 /* Get the region_id of the top-most frame in this root_region's stack,
3027 if any. */
3028
3029 region_id
3030 root_region::get_current_frame_id (const region_model &model) const
3031 {
3032 stack_region *stack = model.get_region <stack_region> (m_stack_rid);
3033 if (stack)
3034 return stack->get_current_frame_id ();
3035 else
3036 return region_id::null ();
3037 }
3038
3039 /* Pop the topmost frame_region from this root_region's stack;
3040 see the comment for stack_region::pop_frame. */
3041
3042 svalue_id
3043 root_region::pop_frame (region_model *model, bool purge, purge_stats *out,
3044 region_model_context *ctxt)
3045 {
3046 stack_region *stack = model->get_region <stack_region> (m_stack_rid);
3047 return stack->pop_frame (model, purge, out, ctxt);
3048 }
3049
3050 /* Return the region_id of the stack region, creating it if doesn't
3051 already exist. */
3052
3053 region_id
3054 root_region::ensure_stack_region (region_model *model)
3055 {
3056 if (m_stack_rid.null_p ())
3057 {
3058 svalue_id uninit_sid
3059 = model->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT,
3060 NULL_TREE));
3061 m_stack_rid
3062 = model->add_region (new stack_region (model->get_root_rid (),
3063 uninit_sid));
3064 }
3065 return m_stack_rid;
3066 }
3067
3068 /* Return the stack region (which could be NULL). */
3069
stack_region *
root_region::get_stack_region (const region_model *model) const
{
  /* Returns NULL if the stack region hasn't been created yet.  */
  return model->get_region <stack_region> (m_stack_rid);
}
3075
3076 /* Return the region_id of the globals region, creating it if doesn't
3077 already exist. */
3078
3079 region_id
3080 root_region::ensure_globals_region (region_model *model)
3081 {
3082 if (m_globals_rid.null_p ())
3083 m_globals_rid
3084 = model->add_region (new globals_region (model->get_root_rid ()));
3085 return m_globals_rid;
3086 }
3087
3088 /* Return the code region (which could be NULL). */
3089
code_region *
root_region::get_code_region (const region_model *model) const
{
  /* Returns NULL if the code region hasn't been created yet.  */
  return model->get_region <code_region> (m_code_rid);
}
3095
3096 /* Return the region_id of the code region, creating it if doesn't
3097 already exist. */
3098
3099 region_id
3100 root_region::ensure_code_region (region_model *model)
3101 {
3102 if (m_code_rid.null_p ())
3103 m_code_rid
3104 = model->add_region (new code_region (model->get_root_rid ()));
3105 return m_code_rid;
3106 }
3107
3108 /* Return the globals region (which could be NULL). */
3109
globals_region *
root_region::get_globals_region (const region_model *model) const
{
  /* Returns NULL if the globals region hasn't been created yet.  */
  return model->get_region <globals_region> (m_globals_rid);
}
3115
3116 /* Return the region_id of the heap region, creating it if doesn't
3117 already exist. */
3118
3119 region_id
3120 root_region::ensure_heap_region (region_model *model)
3121 {
3122 if (m_heap_rid.null_p ())
3123 {
3124 svalue_id uninit_sid
3125 = model->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT,
3126 NULL_TREE));
3127 m_heap_rid
3128 = model->add_region (new heap_region (model->get_root_rid (),
3129 uninit_sid));
3130 }
3131 return m_heap_rid;
3132 }
3133
3134 /* Return the heap region (which could be NULL). */
3135
heap_region *
root_region::get_heap_region (const region_model *model) const
{
  /* Returns NULL if the heap region hasn't been created yet.  */
  return model->get_region <heap_region> (m_heap_rid);
}
3141
3142 /* Implementation of region::remap_region_ids vfunc for root_region. */
3143
void
root_region::remap_region_ids (const region_id_map &map)
{
  /* NOTE(review): unlike the other overrides in this file, this one
     does not call region::remap_region_ids; presumably that is because
     the root's parent rid and value sid are always null — verify.  */
  map.update (&m_stack_rid);
  map.update (&m_globals_rid);
  map.update (&m_code_rid);
  map.update (&m_heap_rid);
}
3152
3153 /* Attempt to merge ROOT_REGION_A and ROOT_REGION_B into
3154 MERGED_ROOT_REGION using MERGER.
3155 Return true if the merger is possible, false otherwise. */
3156
bool
root_region::can_merge_p (const root_region *root_region_a,
			  const root_region *root_region_b,
			  root_region *merged_root_region,
			  model_merger *merger)
{
  /* We can only merge if the stacks are sufficiently similar.  */
  stack_region *stack_a = root_region_a->get_stack_region (merger->m_model_a);
  stack_region *stack_b = root_region_b->get_stack_region (merger->m_model_b);
  if (stack_a && stack_b)
    {
      /* If the two models both have a stack, attempt to merge them.  */
      merged_root_region->ensure_stack_region (merger->m_merged_model);
      if (!stack_region::can_merge_p (stack_a, stack_b, merger))
	return false;
    }
  else if (stack_a || stack_b)
    /* Don't attempt to merge if one model has a stack and the other
       doesn't.  */
    return false;

  map_region *globals_a = root_region_a->get_globals_region (merger->m_model_a);
  map_region *globals_b = root_region_b->get_globals_region (merger->m_model_b);
  if (globals_a && globals_b)
    {
      /* If both models have globals regions, attempt to merge them.  */
      region_id merged_globals_rid
	= merged_root_region->ensure_globals_region (merger->m_merged_model);
      map_region *merged_globals
	= merged_root_region->get_globals_region (merger->m_merged_model);
      if (!map_region::can_merge_p (globals_a, globals_b,
				    merged_globals, merged_globals_rid,
				    merger))
	return false;
    }
  /* otherwise, merge as "no globals".  */

  map_region *code_a = root_region_a->get_code_region (merger->m_model_a);
  map_region *code_b = root_region_b->get_code_region (merger->m_model_b);
  if (code_a && code_b)
    {
      /* If both models have code regions, attempt to merge them.  */
      region_id merged_code_rid
	= merged_root_region->ensure_code_region (merger->m_merged_model);
      map_region *merged_code
	= merged_root_region->get_code_region (merger->m_merged_model);
      if (!map_region::can_merge_p (code_a, code_b,
				    merged_code, merged_code_rid,
				    merger))
	return false;
    }
  /* otherwise, merge as "no code".  */

  heap_region *heap_a = root_region_a->get_heap_region (merger->m_model_a);
  heap_region *heap_b = root_region_b->get_heap_region (merger->m_model_b);
  if (heap_a && heap_b)
    {
      /* If both have a heap, create a "merged" heap.
	 Actually merging the heap contents happens via the region_svalue
	 instances, as needed, when seeing pairs of region_svalue instances.  */
      merged_root_region->ensure_heap_region (merger->m_merged_model);
    }
  /* otherwise, merge as "no heap".  */

  return true;
}
3223
3224 /* Implementation of region::add_to_hash vfunc for root_region. */
3225
3226 void
3227 root_region::add_to_hash (inchash::hash &hstate) const
3228 {
3229 region::add_to_hash (hstate);
3230 inchash::add (m_stack_rid, hstate);
3231 inchash::add (m_globals_rid, hstate);
3232 inchash::add (m_code_rid, hstate);
3233 inchash::add (m_heap_rid, hstate);
3234 }
3235
3236 /* Implementation of region::walk_for_canonicalization vfunc for
3237 root_region. */
3238
3239 void
3240 root_region::walk_for_canonicalization (canonicalization *c) const
3241 {
3242 c->walk_rid (m_stack_rid);
3243 c->walk_rid (m_globals_rid);
3244 c->walk_rid (m_code_rid);
3245 c->walk_rid (m_heap_rid);
3246 }
3247
3248 /* For debugging purposes: look for a descendant region for a local
3249 or global decl named IDENTIFIER (or an SSA_NAME for such a decl),
3250 returning its value, or svalue_id::null if none are found. */
3251
3252 svalue_id
3253 root_region::get_value_by_name (tree identifier,
3254 const region_model &model) const
3255 {
3256 if (stack_region *stack = get_stack_region (&model))
3257 {
3258 svalue_id sid = stack->get_value_by_name (identifier, model);
3259 if (!sid.null_p ())
3260 return sid;
3261 }
3262 if (map_region *globals = get_globals_region (&model))
3263 {
3264 svalue_id sid = globals->get_value_by_name (identifier, model);
3265 if (!sid.null_p ())
3266 return sid;
3267 }
3268 return svalue_id::null ();
3269 }
3270
3271 /* class symbolic_region : public map_region. */
3272
3273 /* symbolic_region's copy ctor. */
3274
symbolic_region::symbolic_region (const symbolic_region &other)
: region (other),
  m_possibly_null (other.m_possibly_null)
{
  /* All state is copied in the initializer list.  */
}
3280
3281 /* Compare the fields of this symbolic_region with OTHER, returning true
3282 if they are equal.
3283 For use by region::operator==. */
3284
bool
symbolic_region::compare_fields (const symbolic_region &other) const
{
  /* The only extra field is the "possibly null" flag.  */
  return m_possibly_null == other.m_possibly_null;
}
3290
3291 /* Implementation of region::clone vfunc for symbolic_region. */
3292
3293 region *
3294 symbolic_region::clone () const
3295 {
3296 return new symbolic_region (*this);
3297 }
3298
3299 /* Implementation of region::walk_for_canonicalization vfunc for
3300 symbolic_region. */
3301
void
symbolic_region::walk_for_canonicalization (canonicalization *) const
{
  /* Empty: a symbolic_region records no child ids to walk.  */
}
3307
3308 /* class region_model. */
3309
3310 /* region_model's default ctor. */
3311
region_model::region_model ()
{
  /* Every model starts with a root region and an (initially empty)
     constraint manager bound to this model.  */
  m_root_rid = add_region (new root_region ());
  m_constraints = new impl_constraint_manager (this);
  // TODO
}
3318
3319 /* region_model's copy ctor. */
3320
3321 region_model::region_model (const region_model &other)
3322 : m_svalues (other.m_svalues.length ()),
3323 m_regions (other.m_regions.length ()),
3324 m_root_rid (other.m_root_rid)
3325 {
3326 /* Clone the svalues and regions. */
3327 int i;
3328
3329 svalue *svalue;
3330 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3331 m_svalues.quick_push (svalue->clone ());
3332
3333 region *region;
3334 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3335 m_regions.quick_push (region->clone ());
3336
3337 m_constraints = other.m_constraints->clone (this);
3338 }
3339
3340 /* region_model's dtor. */
3341
region_model::~region_model ()
{
  /* NOTE(review): only m_constraints is deleted here; presumably
     m_svalues/m_regions are self-deleting vecs, since operator=
     deletes their elements explicitly — verify this is not a leak.  */
  delete m_constraints;
}
3346
3347 /* region_model's assignment operator. */
3348
region_model &
region_model::operator= (const region_model &other)
{
  /* NOTE(review): there is no self-assignment guard; `*this = *this`
     would delete the contents before cloning them — confirm callers
     never do this.  */
  unsigned i;
  svalue *svalue;
  region *region;

  /* Delete existing content.  */
  FOR_EACH_VEC_ELT (m_svalues, i, svalue)
    delete svalue;
  m_svalues.truncate (0);

  FOR_EACH_VEC_ELT (m_regions, i, region)
    delete region;
  m_regions.truncate (0);

  delete m_constraints;

  /* Clone the svalues and regions.  */
  m_svalues.reserve (other.m_svalues.length (), true);
  FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
    m_svalues.quick_push (svalue->clone ());

  m_regions.reserve (other.m_regions.length (), true);
  FOR_EACH_VEC_ELT (other.m_regions, i, region)
    m_regions.quick_push (region->clone ());

  m_root_rid = other.m_root_rid;

  /* Deep-copy the constraints, rebinding them to this model.  */
  m_constraints = other.m_constraints->clone (this);

  return *this;
}
3382
3383 /* Equality operator for region_model.
3384
3385 Amongst other things this directly compares the svalue and region
3386 vectors and so for this to be meaningful both this and OTHER should
3387 have been canonicalized. */
3388
3389 bool
3390 region_model::operator== (const region_model &other) const
3391 {
3392 if (m_root_rid != other.m_root_rid)
3393 return false;
3394
3395 if (m_svalues.length () != other.m_svalues.length ())
3396 return false;
3397
3398 if (m_regions.length () != other.m_regions.length ())
3399 return false;
3400
3401 if (*m_constraints != *other.m_constraints)
3402 return false;
3403
3404 unsigned i;
3405 svalue *svalue;
3406 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3407 if (!(*m_svalues[i] == *other.m_svalues[i]))
3408 return false;
3409
3410 region *region;
3411 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3412 if (!(*m_regions[i] == *other.m_regions[i]))
3413 return false;
3414
3415 gcc_checking_assert (hash () == other.hash ());
3416
3417 return true;
3418 }
3419
3420 /* Generate a hash value for this region_model. */
3421
3422 hashval_t
3423 region_model::hash () const
3424 {
3425 hashval_t result = 0;
3426 int i;
3427
3428 svalue *svalue;
3429 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3430 result ^= svalue->hash ();
3431
3432 region *region;
3433 FOR_EACH_VEC_ELT (m_regions, i, region)
3434 result ^= region->hash ();
3435
3436 result ^= m_constraints->hash ();
3437
3438 return result;
3439 }
3440
3441 /* Print an all-on-one-line representation of this region_model to PP,
3442 which must support %E for trees. */
3443
3444 void
3445 region_model::print (pretty_printer *pp) const
3446 {
3447 int i;
3448
3449 pp_string (pp, "svalues: [");
3450 svalue *svalue;
3451 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3452 {
3453 if (i > 0)
3454 pp_string (pp, ", ");
3455 print_svalue (svalue_id::from_int (i), pp);
3456 }
3457
3458 pp_string (pp, "], regions: [");
3459
3460 region *region;
3461 FOR_EACH_VEC_ELT (m_regions, i, region)
3462 {
3463 if (i > 0)
3464 pp_string (pp, ", ");
3465 region->print (*this, region_id::from_int (i), pp);
3466 }
3467
3468 pp_string (pp, "], constraints: ");
3469
3470 m_constraints->print (pp);
3471 }
3472
/* Print the svalue with id SID to PP.  */

void
region_model::print_svalue (svalue_id sid, pretty_printer *pp) const
{
  /* Delegate to the svalue's own print method, passing this model so
     that ids within the svalue can be resolved.  */
  get_svalue (sid)->print (*this, sid, pp);
}
3480
3481 /* Dump a .dot representation of this region_model to PP, showing
3482 the values and the hierarchy of regions. */
3483
3484 void
3485 region_model::dump_dot_to_pp (pretty_printer *pp) const
3486 {
3487 graphviz_out gv (pp);
3488
3489 pp_string (pp, "digraph \"");
3490 pp_write_text_to_stream (pp);
3491 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
3492 pp_string (pp, "\" {\n");
3493
3494 gv.indent ();
3495
3496 pp_string (pp, "overlap=false;\n");
3497 pp_string (pp, "compound=true;\n");
3498
3499 int i;
3500
3501 svalue *svalue;
3502 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3503 svalue->dump_dot_to_pp (*this, svalue_id::from_int (i), pp);
3504
3505 region *region;
3506 FOR_EACH_VEC_ELT (m_regions, i, region)
3507 region->dump_dot_to_pp (*this, region_id::from_int (i), pp);
3508
3509 /* TODO: constraints. */
3510
3511 /* Terminate "digraph" */
3512 gv.outdent ();
3513 pp_string (pp, "}");
3514 pp_newline (pp);
3515 }
3516
/* Dump a .dot representation of this region_model to FP.  */

void
region_model::dump_dot_to_file (FILE *fp) const
{
  /* Build a pretty_printer that writes directly to FP, using the
     default tree printer so that %E etc. work.  */
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp.buffer->stream = fp;
  dump_dot_to_pp (&pp);
  pp_flush (&pp);
}
3528
3529 /* Dump a .dot representation of this region_model to PATH. */
3530
3531 void
3532 region_model::dump_dot (const char *path) const
3533 {
3534 FILE *fp = fopen (path, "w");
3535 dump_dot_to_file (fp);
3536 fclose (fp);
3537 }
3538
/* Dump a multiline representation of this model to PP, showing the
   region hierarchy, the svalues, and any constraints.

   If SUMMARIZE is true, show only the most pertinent information,
   in a form that attempts to be less verbose.
   Otherwise, show all information.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool summarize) const
{
  if (summarize)
    {
      /* Gather a representative path_var for each region that has
	 one; these are what gets printed in summarized form.  */
      auto_vec<path_var> rep_path_vars;

      unsigned i;
      region *reg;
      FOR_EACH_VEC_ELT (m_regions, i, reg)
	{
	  region_id rid = region_id::from_int (i);
	  path_var pv = get_representative_path_var (rid);
	  if (pv.m_tree)
	    rep_path_vars.safe_push (pv);
	}
      /* Shared across the dump_* helpers so that separators are only
	 printed between items.  */
      bool is_first = true;

      /* Work with a copy in case the get_lvalue calls change anything
	 (they shouldn't).  */
      region_model copy (*this);
      copy.dump_summary_of_rep_path_vars (pp, &rep_path_vars, &is_first);

      /* Print "LHS == RHS" for each pair of trees within an
	 equivalence class, skipping constant-only pairs.  */
      equiv_class *ec;
      FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec)
	{
	  for (unsigned j = 0; j < ec->m_vars.length (); j++)
	    {
	      svalue_id lhs_sid = ec->m_vars[j];
	      tree lhs_tree = get_representative_tree (lhs_sid);
	      if (lhs_tree == NULL_TREE)
		continue;
	      for (unsigned k = j + 1; k < ec->m_vars.length (); k++)
		{
		  svalue_id rhs_sid = ec->m_vars[k];
		  tree rhs_tree = get_representative_tree (rhs_sid);
		  if (rhs_tree
		      && !(CONSTANT_CLASS_P (lhs_tree)
			   && CONSTANT_CLASS_P (rhs_tree)))
		    {
		      dump_separator (pp, &is_first);
		      dump_tree (pp, lhs_tree);
		      pp_string (pp, " == ");
		      dump_tree (pp, rhs_tree);
		    }
		}
	    }
	}

      /* Print "LHS OP RHS" for each inequality constraint, again
	 skipping constant-only pairs.  */
      constraint *c;
      FOR_EACH_VEC_ELT (m_constraints->m_constraints, i, c)
	{
	  const equiv_class &lhs = c->m_lhs.get_obj (*m_constraints);
	  const equiv_class &rhs = c->m_rhs.get_obj (*m_constraints);
	  svalue_id lhs_sid = lhs.get_representative ();
	  svalue_id rhs_sid = rhs.get_representative ();
	  tree lhs_tree = get_representative_tree (lhs_sid);
	  tree rhs_tree = get_representative_tree (rhs_sid);
	  if (lhs_tree && rhs_tree
	      && !(CONSTANT_CLASS_P (lhs_tree) && CONSTANT_CLASS_P (rhs_tree)))
	    {
	      dump_separator (pp, &is_first);
	      dump_tree (pp, lhs_tree);
	      pp_printf (pp, " %s ", constraint_op_code (c->m_op));
	      dump_tree (pp, rhs_tree);
	    }
	}

      return;
    }

  /* Full (non-summarized) dump: region tree, then svalues, then the
     constraint manager.  */
  get_region (m_root_rid)->dump_to_pp (*this, m_root_rid, pp, "", true);

  pp_string (pp, "svalues:");
  pp_newline (pp);
  int i;
  svalue *svalue;
  FOR_EACH_VEC_ELT (m_svalues, i, svalue)
    {
      pp_string (pp, "  ");
      svalue_id sid = svalue_id::from_int (i);
      print_svalue (sid, pp);
      pp_newline (pp);
    }

  pp_string (pp, "constraint manager:");
  pp_newline (pp);
  m_constraints->dump_to_pp (pp);
}
3635
3636 /* Dump a multiline representation of this model to FILE. */
3637
3638 void
3639 region_model::dump (FILE *fp, bool summarize) const
3640 {
3641 pretty_printer pp;
3642 pp_format_decoder (&pp) = default_tree_printer;
3643 pp_show_color (&pp) = pp_show_color (global_dc->printer);
3644 pp.buffer->stream = fp;
3645 dump_to_pp (&pp, summarize);
3646 pp_flush (&pp);
3647 }
3648
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool summarize) const
{
  /* Delegate to the FILE * overload.  */
  dump (stderr, summarize);
}
3656
/* Dump this model fully to stderr (i.e. without summarization).  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (false);
}
3664
3665 /* Dump VEC to PP, in the form "{VEC elements}: LABEL". */
3666
3667 static void
3668 dump_vec_of_tree (pretty_printer *pp,
3669 bool *is_first,
3670 const auto_vec<tree> &vec,
3671 const char *label)
3672 {
3673 if (vec.length () == 0)
3674 return;
3675
3676 dump_separator (pp, is_first);
3677 pp_printf (pp, "{");
3678 unsigned i;
3679 tree key;
3680 FOR_EACH_VEC_ELT (vec, i, key)
3681 {
3682 if (i > 0)
3683 pp_string (pp, ", ");
3684 dump_tree (pp, key);
3685 }
3686 pp_printf (pp, "}: %s", label);
3687 }
3688
/* Dump all *REP_PATH_VARS to PP in compact form, updating *IS_FIRST.
   Subroutine of region_model::dump_to_pp.  */

void
region_model::dump_summary_of_rep_path_vars (pretty_printer *pp,
					     auto_vec<path_var> *rep_path_vars,
					     bool *is_first)
{
  /* Print pointers, constants, and poisoned values that aren't "uninit";
     gather keys for unknown and uninit values.  */
  unsigned i;
  path_var *pv;
  auto_vec<tree> unknown_trees;
  auto_vec<tree> uninit_trees;
  FOR_EACH_VEC_ELT (*rep_path_vars, i, pv)
    {
      /* Skip string literals.  */
      if (TREE_CODE (pv->m_tree) == STRING_CST)
	continue;
      /* Use a tentative context so that any problems hit by get_lvalue
	 are swallowed rather than reported; skip such path_vars.  */
      tentative_region_model_context ctxt;
      region_id child_rid = get_lvalue (*pv, &ctxt);
      if (ctxt.had_errors_p ())
	continue;
      region *child_region = get_region (child_rid);
      if (!child_region)
	continue;
      /* Only consider regions with a directly-set value.  */
      svalue_id sid = child_region->get_value_direct ();
      if (sid.null_p ())
	continue;
      svalue *sval = get_svalue (sid);
      switch (sval->get_kind ())
	{
	default:
	  gcc_unreachable ();
	case SK_REGION:
	  {
	    /* Pointer value: print "VAR: &POINTEE" (falling back to
	       the raw region id if the pointee has no representative
	       tree).  */
	    region_svalue *region_sval = as_a <region_svalue *> (sval);
	    region_id pointee_rid = region_sval->get_pointee ();
	    gcc_assert (!pointee_rid.null_p ());
	    tree pointee = get_representative_path_var (pointee_rid).m_tree;
	    dump_separator (pp, is_first);
	    dump_tree (pp, pv->m_tree);
	    pp_string (pp, ": ");
	    pp_character (pp, '&');
	    if (pointee)
	      dump_tree (pp, pointee);
	    else
	      pointee_rid.print (pp);
	  }
	  break;
	case SK_CONSTANT:
	  /* Constant value: print "VAR: CONSTANT".  */
	  dump_separator (pp, is_first);
	  dump_tree (pp, pv->m_tree);
	  pp_string (pp, ": ");
	  dump_tree (pp, sval->dyn_cast_constant_svalue ()->get_constant ());
	  break;
	case SK_UNKNOWN:
	  /* Defer; printed in consolidated form below.  */
	  unknown_trees.safe_push (pv->m_tree);
	  break;
	case SK_POISONED:
	  {
	    poisoned_svalue *poisoned_sval = as_a <poisoned_svalue *> (sval);
	    enum poison_kind pkind = poisoned_sval->get_poison_kind ();
	    if (pkind == POISON_KIND_UNINIT)
	      /* Defer; printed in consolidated form below.  */
	      uninit_trees.safe_push (pv->m_tree);
	    else
	      {
		dump_separator (pp, is_first);
		dump_tree (pp, pv->m_tree);
		pp_printf (pp, ": %s", poison_kind_to_str (pkind));
	      }
	  }
	  break;
	case SK_SETJMP:
	  /* Record of a setjmp: print the exploded-node index at which
	     it occurred.  */
	  dump_separator (pp, is_first);
	  pp_printf (pp, "setjmp: EN: %i",
		     sval->dyn_cast_setjmp_svalue ()->get_enode_index ());
	  break;
	}
    }

  /* Print unknown and uninitialized values in consolidated form.  */
  dump_vec_of_tree (pp, is_first, unknown_trees, "unknown");
  dump_vec_of_tree (pp, is_first, uninit_trees, "uninit");
}
3773
/* Assert that this object is valid.  */

void
region_model::validate () const
{
  /* Skip this in a release build.  */
#if !CHECKING_P
  return;
#endif

  m_constraints->validate ();

  /* Validate each region against this model.  */
  unsigned i;
  region *r;
  FOR_EACH_VEC_ELT (m_regions, i, r)
    r->validate (*this);

  // TODO: anything else?

  /* Verify that the stack region (if any) has an "uninitialized" value.  */
  region *stack_region = get_root_region ()->get_stack_region (this);
  if (stack_region)
    {
      svalue_id stack_value_sid = stack_region->get_value_direct ();
      svalue *stack_value = get_svalue (stack_value_sid);
      gcc_assert (stack_value->get_kind () == SK_POISONED);
      poisoned_svalue *subclass = stack_value->dyn_cast_poisoned_svalue ();
      gcc_assert (subclass);
      gcc_assert (subclass->get_poison_kind () == POISON_KIND_UNINIT);
    }
}
3805
/* Global data for use by svalue_id_cmp_by_constant_svalue.
   qsort comparators can't take a context argument, so the model being
   canonicalized is passed via this file-scope variable; it is set
   around the qsort call in region_model::canonicalize and reset to
   NULL afterwards (and so is not reentrant).  */

static region_model *svalue_id_cmp_by_constant_svalue_model = NULL;
3809
3810 /* Comparator for use by region_model::canonicalize. */
3811
3812 static int
3813 svalue_id_cmp_by_constant_svalue (const void *p1, const void *p2)
3814 {
3815 const svalue_id *sid1 = (const svalue_id *)p1;
3816 const svalue_id *sid2 = (const svalue_id *)p2;
3817 gcc_assert (!sid1->null_p ());
3818 gcc_assert (!sid2->null_p ());
3819 gcc_assert (svalue_id_cmp_by_constant_svalue_model);
3820 const svalue &sval1
3821 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid1);
3822 const svalue &sval2
3823 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid2);
3824 gcc_assert (sval1.get_kind () == SK_CONSTANT);
3825 gcc_assert (sval2.get_kind () == SK_CONSTANT);
3826
3827 tree cst1 = ((const constant_svalue &)sval1).get_constant ();
3828 tree cst2 = ((const constant_svalue &)sval2).get_constant ();
3829 return tree_cmp (cst1, cst2);
3830 }
3831
/* Reorder the regions and svalues into a deterministic "canonical" order,
   to maximize the chance of equality.
   If non-NULL, notify CTXT about the svalue id remapping.  */

void
region_model::canonicalize (region_model_context *ctxt)
{
  /* Walk all regions and values in a deterministic order, visiting
     rids and sids, generating a rid and sid map.  */
  canonicalization c (*this);

  /* (1): Walk all svalues, putting constants first, sorting the constants
     (thus imposing an ordering on any constants that are purely referenced
     by constraints).
     Ignore other svalues for now.  */
  {
    unsigned i;
    auto_vec<svalue_id> sids;
    svalue *sval;
    FOR_EACH_VEC_ELT (m_svalues, i, sval)
      {
	if (sval->get_kind () == SK_CONSTANT)
	  sids.safe_push (svalue_id::from_int (i));
      }
    /* The qsort comparator can't take a context argument, so pass
       this model via a file-scope global around the qsort call.  */
    svalue_id_cmp_by_constant_svalue_model = this;
    sids.qsort (svalue_id_cmp_by_constant_svalue);
    svalue_id_cmp_by_constant_svalue_model = NULL;
    svalue_id *sid;
    FOR_EACH_VEC_ELT (sids, i, sid)
      c.walk_sid (*sid);
  }

  /* (2): Walk all regions (and thus their values) in a deterministic
     order.  */
  c.walk_rid (m_root_rid);

  /* (3): Ensure we've visited everything, as we don't want to purge
     at this stage.  Anything we visit for the first time here has
     arbitrary order.  */
  {
    unsigned i;
    region *region;
    FOR_EACH_VEC_ELT (m_regions, i, region)
      c.walk_rid (region_id::from_int (i));
    svalue *sval;
    FOR_EACH_VEC_ELT (m_svalues, i, sval)
      c.walk_sid (svalue_id::from_int (i));
  }

  /* (4): We now have a reordering of the regions and values.
     Apply it.  */
  remap_svalue_ids (c.m_sid_map);
  remap_region_ids (c.m_rid_map);
  if (ctxt)
    ctxt->remap_svalue_ids (c.m_sid_map);

  /* (5): Canonicalize the constraint_manager (it has already had its
     svalue_ids remapped above).  This makes use of the new svalue_id
     values, and so must happen last.  */
  m_constraints->canonicalize (get_num_svalues ());

  /* Self-check the result.  */
  validate ();
}
3895
3896 /* Return true if this region_model is in canonical form. */
3897
3898 bool
3899 region_model::canonicalized_p () const
3900 {
3901 region_model copy (*this);
3902 copy.canonicalize (NULL);
3903 return *this == copy;
3904 }
3905
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR is the tree for the poisoned value; PKIND records how it
     became poisoned (uninit, freed, or popped stack frame).  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
  : m_expr (expr), m_pkind (pkind)
  {}

  const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }

  /* Deduplicate by expression; the poison kind is not compared.  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return m_expr == other.m_expr;
  }

  /* Emit the warning appropriate to the poison kind.  */
  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_of_uninitialized_value,
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_after_free,
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at (rich_loc,
			     OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
			     "use of pointer %qE within stale stack frame",
			     m_expr);
	}
	break;
      }
  }

  /* Describe the final event in the path leading to this diagnostic.  */
  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("use of pointer %qE within stale stack frame here",
	   m_expr);
      }
  }

private:
  /* The poisoned expression.  */
  tree m_expr;

  /* How the value became poisoned.  */
  enum poison_kind m_pkind;
};
3985
/* Determine if EXPR is poisoned, and if so, queue a diagnostic to CTXT.  */

void
region_model::check_for_poison (tree expr, region_model_context *ctxt)
{
  /* With no context, there is nowhere to report a diagnostic.  */
  if (!ctxt)
    return;

  // TODO: this is disabled for now (too many false positives)
  /* NOTE: the early return below deliberately makes the remainder of
     this function dead code until the false positives are fixed.  */
  return;

  svalue_id expr_sid = get_rvalue (expr, ctxt);
  gcc_assert (!expr_sid.null_p ());
  svalue *expr_svalue = get_svalue (expr_sid);
  gcc_assert (expr_svalue);
  if (const poisoned_svalue *poisoned_sval
	= expr_svalue->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();
      /* Ownership of the diagnostic is transferred to CTXT.  */
      ctxt->warn (new poisoned_value_diagnostic (expr, pkind));
    }
}
4008
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  region_id lhs_rid = get_lvalue (lhs, ctxt);

  /* Check for uses of poisoned values.
     The cases deliberately fall through so that every operand that
     exists for the rhs class gets checked.  */
  switch (get_gimple_rhs_class (gimple_expr_code (assign)))
    {
    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
      break;
    case GIMPLE_TERNARY_RHS:
      check_for_poison (gimple_assign_rhs3 (assign), ctxt);
      /* Fallthru */
    case GIMPLE_BINARY_RHS:
      check_for_poison (gimple_assign_rhs2 (assign), ctxt);
      /* Fallthru */
    case GIMPLE_UNARY_RHS:
    case GIMPLE_SINGLE_RHS:
      check_for_poison (gimple_assign_rhs1 (assign), ctxt);
    }

  /* If we have no region for the LHS there's nothing to update.  */
  if (lhs_rid.null_p ())
    return;
  // TODO: issue a warning for this case

  /* Dispatch on the rhs code, updating the value of LHS_RID.  */
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Unhandled op: conservatively give the LHS a fresh unknown
	   value.  (The sorry_at is compiled out.)  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
      }
      break;

    case BIT_FIELD_REF:
      {
	// TODO
      }
      break;

    case CONSTRUCTOR:
      {
	/* e.g. "x ={v} {CLOBBER};"  */
	// TODO
      }
      break;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	svalue_id ptr_sid = get_rvalue (ptr, ctxt);
	svalue_id offset_sid = get_rvalue (offset, ctxt);
	region_id element_rid
	  = get_or_create_pointer_plus_expr (TREE_TYPE (TREE_TYPE (ptr)),
					     ptr_sid, offset_sid,
					     ctxt);
	svalue_id element_ptr_sid
	  = get_or_create_ptr_svalue (TREE_TYPE (ptr), element_rid);
	set_value (lhs_rid, element_ptr_sid, ctxt);
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */

	/* TODO.  */

	set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
      }
      break;

    case ADDR_EXPR:
      {
	/* LHS = &RHS;  */
	svalue_id ptr_sid = get_rvalue (rhs1, ctxt);
	set_value (lhs_rid, ptr_sid, ctxt);
      }
      break;

    case MEM_REF:
      {
	/* Copy the value of the dereferenced region to the LHS.  */
	region_id rhs_rid = get_lvalue (rhs1, ctxt);
	svalue_id rhs_sid
	  = get_region (rhs_rid)->get_value (*this, true, ctxt);
	set_value (lhs_rid, rhs_sid, ctxt);
      }
      break;

    case REAL_CST:
    case INTEGER_CST:
    case ARRAY_REF:
      {
	/* LHS = RHS;  */
	svalue_id cst_sid = get_rvalue (rhs1, ctxt);
	set_value (lhs_rid, cst_sid, ctxt);
      }
      break;

    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NOP_EXPR:
      // cast: TODO
      // fall though for now
    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
      {
	/* LHS = VAR;  */
	svalue_id var_sid = get_rvalue (rhs1, ctxt);
	set_value (lhs_rid, var_sid, ctxt);
      }
      break;

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
      {
	/* Comparison: if the condition's truth value is known under
	   the current constraints, assign the corresponding boolean
	   constant; otherwise a fresh unknown value.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	// TODO: constraints between svalues
	svalue_id rhs1_sid = get_rvalue (rhs1, ctxt);
	svalue_id rhs2_sid = get_rvalue (rhs2, ctxt);

	tristate t = eval_condition (rhs1_sid, op, rhs2_sid);
	if (t.is_known ())
	  set_value (lhs_rid,
		     get_rvalue (t.is_true ()
				 ? boolean_true_node
				 : boolean_false_node,
				 ctxt),
		     ctxt);
	else
	  set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
      }
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
	// TODO: unary ops

	// TODO: constant?

	set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      {
	/* Binary ops.  Constant-fold when both operands are constants;
	   otherwise assign a fresh unknown value.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	svalue_id rhs1_sid = get_rvalue (rhs1, ctxt);
	svalue_id rhs2_sid = get_rvalue (rhs2, ctxt);

	if (tree rhs1_cst = maybe_get_constant (rhs1_sid))
	  if (tree rhs2_cst = maybe_get_constant (rhs2_sid))
	    {
	      tree result = fold_binary (op, TREE_TYPE (lhs),
					 rhs1_cst, rhs2_cst);
	      if (result && CONSTANT_CLASS_P (result))
		{
		  svalue_id result_sid
		    = get_or_create_constant_svalue (result);
		  set_value (lhs_rid, result_sid, ctxt);
		  return;
		}
	    }
	set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
      }
      break;

    case COMPONENT_REF:
      {
	/* LHS = op0.op1;  */
	region_id child_rid = get_lvalue (rhs1, ctxt);
	svalue_id child_sid
	  = get_region (child_rid)->get_value (*this, true, ctxt);
	set_value (lhs_rid, child_sid, ctxt);
      }
      break;
    }
}
4220
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  /* Capture the lvalue and type of the LHS, if any.  */
  region_id lhs_rid;
  tree lhs_type = NULL_TREE;
  if (tree lhs = gimple_call_lhs (call))
    {
      lhs_rid = get_lvalue (lhs, ctxt);
      lhs_type = TREE_TYPE (lhs);
    }

  /* Check for uses of poisoned values.
     For now, special-case "free", to avoid warning about "use-after-free"
     when "double free" would be more precise.  */
  if (!is_special_named_call_p (call, "free", 1))
    for (unsigned i = 0; i < gimple_call_num_args (call); i++)
      check_for_poison (gimple_call_arg (call, i), ctxt);

  bool unknown_side_effects = false;

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      /* "malloc": bind the LHS to a pointer to a fresh heap region.  */
      if (is_named_call_p (callee_fndecl, "malloc", call, 1))
	{
	  // TODO: capture size as a svalue?
	  region_id new_rid = add_new_malloc_region ();
	  if (!lhs_rid.null_p ())
	    {
	      svalue_id ptr_sid
		= get_or_create_ptr_svalue (lhs_type, new_rid);
	      set_value (lhs_rid, ptr_sid, ctxt);
	    }
	  return false;
	}
      /* "__builtin_alloca": a fresh region within the current frame.  */
      else if (is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
	{
	  region_id frame_rid = get_current_frame_id ();
	  region_id new_rid
	    = add_region (new symbolic_region (frame_rid, NULL_TREE, false));
	  if (!lhs_rid.null_p ())
	    {
	      svalue_id ptr_sid
		= get_or_create_ptr_svalue (lhs_type, new_rid);
	      set_value (lhs_rid, ptr_sid, ctxt);
	    }
	  return false;
	}
      else if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
	       || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
	       || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
	{
	  /* __builtin_expect's return value is its initial argument.  */
	  if (!lhs_rid.null_p ())
	    {
	      tree initial_arg = gimple_call_arg (call, 0);
	      svalue_id sid = get_rvalue (initial_arg, ctxt);
	      set_value (lhs_rid, sid, ctxt);
	    }
	  return false;
	}
      /* "strlen": fold to a constant when the buffer holds a known
	 string literal.  */
      else if (is_named_call_p (callee_fndecl, "strlen", call, 1))
	{
	  region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt);
	  svalue_id buf_sid
	    = get_region (buf_rid)->get_value (*this, true, ctxt);
	  if (tree cst_expr = maybe_get_constant (buf_sid))
	    {
	      if (TREE_CODE (cst_expr) == STRING_CST
		  && !lhs_rid.null_p ())
		{
		  /* TREE_STRING_LENGTH is sizeof, not strlen.  */
		  int sizeof_cst = TREE_STRING_LENGTH (cst_expr);
		  int strlen_cst = sizeof_cst - 1;
		  tree t_cst = build_int_cst (lhs_type, strlen_cst);
		  svalue_id result_sid
		    = get_or_create_constant_svalue (t_cst);
		  set_value (lhs_rid, result_sid, ctxt);
		  return false;
		}
	    }
	  /* Otherwise an unknown value.  */
	}
      else if (is_named_call_p (callee_fndecl,
				"__analyzer_dump_num_heap_regions", call, 0))
	{
	  /* Handle the builtin "__analyzer_dump_num_heap_regions" by emitting
	     a warning (for use in DejaGnu tests).  */
	  int num_heap_regions = 0;
	  region_id heap_rid = get_root_region ()->ensure_heap_region (this);
	  unsigned i;
	  region *region;
	  FOR_EACH_VEC_ELT (m_regions, i, region)
	    if (region->get_parent () == heap_rid)
	      num_heap_regions++;
	  /* Use quotes to ensure the output isn't truncated.  */
	  warning_at (call->location, 0,
		      "num heap regions: %qi", num_heap_regions);
	  return false;
	}
      /* A known fndecl whose body we can't see and which isn't pure
	 may have arbitrary side effects.  */
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && !DECL_PURE_P (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    /* Couldn't identify the callee at all.  */
    unknown_side_effects = true;

  /* Unknown return value.  */
  if (!lhs_rid.null_p ())
    set_to_new_unknown_value (lhs_rid, lhs_type, ctxt);

  return unknown_side_effects;
}
4344
4345 /* Update this model for the CALL stmt, using CTXT to report any
4346 diagnostics - the second half.
4347
4348 Updates to the region_model that should be made *after* sm-states
4349 are updated are done here; other updates to the region_model are done
4350 in region_model::on_call_pre.
4351
4352 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
4353 to purge state. */
4354
4355 void
4356 region_model::on_call_post (const gcall *call,
4357 bool unknown_side_effects,
4358 region_model_context *ctxt)
4359 {
4360 /* Update for "free" here, after sm-handling.
4361
4362 If the ptr points to an underlying heap region, delete the region,
4363 poisoning pointers to it and regions within it.
4364
4365 We delay this until after sm-state has been updated so that the
4366 sm-handling can transition all of the various casts of the pointer
4367 to a "freed" state *before* we delete the related region here.
4368
4369 This has to be done here so that the sm-handling can use the fact
4370 that they point to the same region to establish that they are equal
4371 (in region_model::eval_condition_without_cm), and thus transition
4372 all pointers to the region to the "freed" state together, regardless
4373 of casts. */
4374 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4375 if (is_named_call_p (callee_fndecl, "free", call, 1))
4376 {
4377 tree ptr = gimple_call_arg (call, 0);
4378 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
4379 svalue *ptr_sval = get_svalue (ptr_sid);
4380 if (region_svalue *ptr_to_region_sval
4381 = ptr_sval->dyn_cast_region_svalue ())
4382 {
4383 /* If the ptr points to an underlying heap region, delete it,
4384 poisoning pointers. */
4385 region_id pointee_rid = ptr_to_region_sval->get_pointee ();
4386 region_id heap_rid = get_root_region ()->ensure_heap_region (this);
4387 if (!pointee_rid.null_p ()
4388 && get_region (pointee_rid)->get_parent () == heap_rid)
4389 {
4390 purge_stats stats;
4391 delete_region_and_descendents (pointee_rid,
4392 POISON_KIND_FREED,
4393 &stats, ctxt->get_logger ());
4394 purge_unused_svalues (&stats, ctxt);
4395 validate ();
4396 // TODO: do anything with stats?
4397 }
4398 }
4399 return;
4400 }
4401
4402 if (unknown_side_effects)
4403 handle_unrecognized_call (call, ctxt);
4404 }
4405
/* Helper class for region_model::handle_unrecognized_call, for keeping
   track of all regions that are reachable, and, of those, which are
   mutable.  */

class reachable_regions
{
public:
  reachable_regions (region_model *model)
  : m_model (model), m_reachable_rids (), m_mutable_rids ()
  {}

  /* Lazily mark RID as being reachable, recursively adding regions
     reachable from RID.  */
  void add (region_id rid, bool is_mutable)
  {
    gcc_assert (!rid.null_p ());

    unsigned idx = rid.as_int ();
    /* Bail out if this region is already in the sets at the IS_MUTABLE
       level of mutability.  */
    if (!is_mutable && bitmap_bit_p (m_reachable_rids, idx))
      return;
    bitmap_set_bit (m_reachable_rids, idx);

    if (is_mutable)
      {
	if (bitmap_bit_p (m_mutable_rids, idx))
	  return;
	else
	  bitmap_set_bit (m_mutable_rids, idx);
      }

    /* If this region's value is a pointer, add the pointee.  */
    region *reg = m_model->get_region (rid);
    svalue_id sid = reg->get_value_direct ();
    svalue *sval = m_model->get_svalue (sid);
    if (sval)
      if (region_svalue *ptr = sval->dyn_cast_region_svalue ())
	{
	  region_id pointee_rid = ptr->get_pointee ();
	  /* Use const-ness of pointer type to affect mutability.  */
	  bool ptr_is_mutable = true;
	  if (ptr->get_type ()
	      && TREE_CODE (ptr->get_type ()) == POINTER_TYPE
	      && TYPE_READONLY (TREE_TYPE (ptr->get_type ())))
	    ptr_is_mutable = false;
	  add (pointee_rid, ptr_is_mutable);
	}

    /* Add descendents of this region, at the same mutability level.  */
    region_id_set descendents (m_model);
    m_model->get_descendents (rid, &descendents, region_id::null ());
    for (unsigned i = 0; i < m_model->get_num_regions (); i++)
      {
	region_id iter_rid = region_id::from_int (i);
	if (descendents.region_p (iter_rid))
	  add (iter_rid, is_mutable);
      }
  }

  /* Return true if RID was reached via a mutable access.  */
  bool mutable_p (region_id rid)
  {
    gcc_assert (!rid.null_p ());
    return bitmap_bit_p (m_mutable_rids, rid.as_int ());
  }

private:
  region_model *m_model;

  /* The region ids already seen.  This has to be an auto_bitmap rather than
     an auto_sbitmap as new regions can be created within the model during
     the traversal.  */
  auto_bitmap m_reachable_rids;

  /* The region_ids that can be changed (accessed via non-const pointers).  */
  auto_bitmap m_mutable_rids;
};
4483
4484 /* Handle a call CALL to a function with unknown behavior.
4485
4486 Traverse the regions in this model, determining what regions are
4487 reachable from pointer arguments to CALL and from global variables,
4488 recursively.
4489
4490 Set all reachable regions to new unknown values and purge sm-state
4491 from their values, and from values that point to them. */
4492
4493 void
4494 region_model::handle_unrecognized_call (const gcall *call,
4495 region_model_context *ctxt)
4496 {
4497 tree fndecl = get_fndecl_for_call (call, ctxt);
4498
4499 reachable_regions reachable_regions (this);
4500
4501 /* Determine the reachable regions and their mutability. */
4502 {
4503 /* Globals. */
4504 region_id globals_rid = get_globals_region_id ();
4505 if (!globals_rid.null_p ())
4506 reachable_regions.add (globals_rid, true);
4507
4508 /* Params that are pointers. */
4509 tree iter_param_types = NULL_TREE;
4510 if (fndecl)
4511 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4512 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
4513 {
4514 /* Track expected param type, where available. */
4515 tree param_type = NULL_TREE;
4516 if (iter_param_types)
4517 {
4518 param_type = TREE_VALUE (iter_param_types);
4519 gcc_assert (param_type);
4520 iter_param_types = TREE_CHAIN (iter_param_types);
4521 }
4522
4523 tree parm = gimple_call_arg (call, arg_idx);
4524 svalue_id parm_sid = get_rvalue (parm, ctxt);
4525 svalue *parm_sval = get_svalue (parm_sid);
4526 if (parm_sval)
4527 if (region_svalue *parm_ptr = parm_sval->dyn_cast_region_svalue ())
4528 {
4529 region_id pointee_rid = parm_ptr->get_pointee ();
4530 bool is_mutable = true;
4531 if (param_type
4532 && TREE_CODE (param_type) == POINTER_TYPE
4533 && TYPE_READONLY (TREE_TYPE (param_type)))
4534 is_mutable = false;
4535 reachable_regions.add (pointee_rid, is_mutable);
4536 }
4537 // FIXME: what about compound parms that contain ptrs?
4538 }
4539 }
4540
4541 /* OK: we now have all reachable regions.
4542 Set them all to new unknown values. */
4543 for (unsigned i = 0; i < get_num_regions (); i++)
4544 {
4545 region_id iter_rid = region_id::from_int (i);
4546 if (reachable_regions.mutable_p (iter_rid))
4547 {
4548 region *reg = get_region (iter_rid);
4549
4550 /* Purge any sm-state for any underlying svalue. */
4551 svalue_id curr_sid = reg->get_value_direct ();
4552 if (!curr_sid.null_p ())
4553 ctxt->on_unknown_change (curr_sid);
4554
4555 set_to_new_unknown_value (iter_rid,
4556 reg->get_type (),
4557 ctxt);
4558 }
4559 }
4560
4561 /* Purge sm-state for any remaining svalues that point to regions that
4562 were reachable. This helps suppress leak false-positives.
4563
4564 For example, if we had a malloc call that was cast to a "foo *" type,
4565 we could have a temporary void * for the result of malloc which has its
4566 own svalue, not reachable from the function call, but for which the
4567 "foo *" svalue was reachable. If we don't purge it, the temporary will
4568 be reported as a leak. */
4569 int i;
4570 svalue *svalue;
4571 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
4572 if (region_svalue *ptr = svalue->dyn_cast_region_svalue ())
4573 {
4574 region_id pointee_rid = ptr->get_pointee ();
4575 if (reachable_regions.mutable_p (pointee_rid))
4576 ctxt->on_unknown_change (svalue_id::from_int (i));
4577 }
4578
4579 validate ();
4580 }
4581
4582 /* Update this model for the RETURN_STMT, using CTXT to report any
4583 diagnostics. */
4584
4585 void
4586 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
4587 {
4588 tree callee = get_current_function ()->decl;
4589 tree lhs = DECL_RESULT (callee);
4590 tree rhs = gimple_return_retval (return_stmt);
4591
4592 if (lhs && rhs)
4593 set_value (get_lvalue (lhs, ctxt), get_rvalue (rhs, ctxt), ctxt);
4594 }
4595
4596 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
4597 ENODE, using CTXT to report any diagnostics.
4598
4599 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
4600 0), as opposed to any second return due to longjmp/sigsetjmp. */
4601
4602 void
4603 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
4604 region_model_context *ctxt)
4605 {
4606 region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt);
4607 region *buf = get_region (buf_rid);
4608
4609 /* Create a setjmp_svalue for this call and store it in BUF_RID's region. */
4610 if (buf)
4611 {
4612 setjmp_record r (enode, call);
4613 svalue *sval = new setjmp_svalue (r, buf->get_type ());
4614 svalue_id new_sid = add_svalue (sval);
4615 set_value (buf_rid, new_sid, ctxt);
4616 }
4617
4618 /* Direct calls to setjmp return 0. */
4619 if (tree lhs = gimple_call_lhs (call))
4620 {
4621 tree zero = build_int_cst (TREE_TYPE (lhs), 0);
4622 svalue_id new_sid = get_or_create_constant_svalue (zero);
4623 region_id lhs_rid = get_lvalue (lhs, ctxt);
4624 set_value (lhs_rid, new_sid, ctxt);
4625 }
4626 }
4627
4628 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
4629 to a "setjmp" at SETJMP_CALL where the final stack depth should be
4630 SETJMP_STACK_DEPTH. Purge any stack frames, potentially reporting on
4631 leaks to CTXT. */
4632
void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth,
			  region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".
     This must happen before any frames are popped below, since the
     argument is evaluated in the longjmp caller's frame.  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  svalue_id fake_retval_sid = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    {
      /* Don't purge unused svalues yet, as we're using fake_retval_sid.  */
      pop_frame (false, NULL, ctxt);
    }

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      tree t_zero = build_int_cst (TREE_TYPE (fake_retval), 0);
      svalue_id zero_sid = get_or_create_constant_svalue (t_zero);
      tristate eq_zero = eval_condition (fake_retval_sid, EQ_EXPR, zero_sid);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  tree t_one = build_int_cst (TREE_TYPE (fake_retval), 1);
	  svalue_id one_sid = get_or_create_constant_svalue (t_one);
	  fake_retval_sid = one_sid;
	}
      else
	{
	  /* Otherwise note that the value is nonzero (this also covers
	     the case where eq_zero is unknown).  */
	  m_constraints->add_constraint (fake_retval_sid, NE_EXPR, zero_sid);
	}

      region_id lhs_rid = get_lvalue (lhs, ctxt);
      set_value (lhs_rid, fake_retval_sid, ctxt);
    }

  /* Now that we've assigned the fake_retval, we can purge the unused
     svalues, which could detect leaks.  */
  purge_unused_svalues (NULL, ctxt, NULL);
  validate ();
}
4682
4683 /* Update this region_model for a phi stmt of the form
4684 LHS = PHI <...RHS...>.
4685 where RHS is for the appropriate edge. */
4686
void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs, bool is_back_edge,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  /* Evaluate the value coming in along this specific edge.  */
  svalue_id rhs_sid = get_rvalue (rhs, ctxt);

  if (is_back_edge && get_svalue (rhs_sid)->get_kind () != SK_UNKNOWN)
    {
      /* If we have a back edge, we probably have a loop.
	 Use an unknown value, to avoid effectively unrolling the
	 loop.
	 To terminate, we need to avoid generating a series of
	 models with an unbounded monotonically increasing number of
	 redundant unknown values; hence we need to purge svalues
	 before inserting the state into the exploded graph, to
	 collect unused svalues.  */
      set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt);
    }
  else
    set_value (get_lvalue (lhs, ctxt), rhs_sid, ctxt);

  /* Notify the context, if any, about the phi.  */
  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
4718
4719 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
4720
4721 Get the id of the region for PV within this region_model,
4722 emitting any diagnostics to CTXT. */
4723
region_id
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt)
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unhandled tree code: create a dummy region and mark the state
	 as not properly modelled (see the comment above
	 make_region_for_unexpected_tree_code).  */
      return make_region_for_unexpected_tree_code (ctxt, expr,
						   dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);
#if 0
	// TODO: operands 2 and 3, if present:
	gcc_assert (TREE_OPERAND (expr, 2) == NULL_TREE);
	gcc_assert (TREE_OPERAND (expr, 3) == NULL_TREE);
#endif

	region_id array_rid = get_lvalue (array, ctxt);
	svalue_id index_sid = get_rvalue (index, ctxt);
	region *base_array_reg = get_region (array_rid);
	array_region *array_reg = base_array_reg->dyn_cast_array_region ();
	if (!array_reg)
	  {
	    /* Normally, array_rid ought to refer to an array_region, since
	       array's type will be ARRAY_TYPE.  However, if we have an
	       unexpected tree code for array, we could have a
	       symbolic_region here.  If so, we're in error-handling.  */
	    gcc_assert (base_array_reg->get_type () == NULL_TREE);
	    return make_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());
	  }
	return array_reg->get_element (this, array_rid, index_sid, ctxt);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* For now, create a view, as if a cast, ignoring the bit positions.  */
	tree obj = TREE_OPERAND (expr, 0);
	return get_or_create_view (get_lvalue (obj, ctxt), TREE_TYPE (expr),
				   ctxt);
      };
      break;

    case MEM_REF:
      {
	/* *(PTR + OFFSET), where OFFSET is in bytes.  */
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	svalue_id ptr_sid = get_rvalue (ptr, ctxt);
	svalue_id offset_sid = get_rvalue (offset, ctxt);
	return get_or_create_mem_ref (TREE_TYPE (expr), ptr_sid,
				      offset_sid, ctxt);
      }
      break;

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	{
	  region_id globals_rid
	    = get_root_region ()->ensure_globals_region (this);
	  map_region *globals = get_region<map_region> (globals_rid);
	  region_id var_rid = globals->get_or_create (this, globals_rid, expr,
						      TREE_TYPE (expr), ctxt);
	  return var_rid;
	}

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	/* Locals are looked up within the frame for the stack depth
	   given by PV.  */
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	int stack_depth = pv.m_stack_depth;
	stack_region *stack = get_root_region ()->get_stack_region (this);
	gcc_assert (stack);
	region_id frame_rid = stack->get_frame_rid (stack_depth);
	frame_region *frame = get_region <frame_region> (frame_rid);
	gcc_assert (frame);
	region_id child_rid = frame->get_or_create (this, frame_rid, expr,
						    TREE_TYPE (expr), ctxt);
	return child_rid;
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	tree obj_type = TREE_TYPE (obj);
	if (TREE_CODE (obj_type) != RECORD_TYPE
	    && TREE_CODE (obj_type) != UNION_TYPE)
	  return make_region_for_unexpected_tree_code (ctxt, obj_type,
						       dump_location_t ());
	region_id obj_rid = get_lvalue (obj, ctxt);
	region_id struct_or_union_rid
	  = get_or_create_view (obj_rid, TREE_TYPE (obj), ctxt);
	return get_field_region (struct_or_union_rid, field, ctxt);
      }
      break;

    case CONST_DECL:
      {
	/* Enumerator etc.: make a region and initialize it from
	   DECL_INITIAL, if present.  */
	tree cst_type = TREE_TYPE (expr);
	region_id cst_rid = add_region_for_type (m_root_rid, cst_type, ctxt);
	if (tree value = DECL_INITIAL (expr))
	  {
	    svalue_id sid = get_rvalue (value, ctxt);
	    get_region (cst_rid)->set_value (*this, cst_rid, sid, ctxt);
	  }
	return cst_rid;
      }
      break;

    case STRING_CST:
      {
	/* Make an array region holding the string constant's value.  */
	tree cst_type = TREE_TYPE (expr);
	array_region *cst_region = new array_region (m_root_rid, cst_type);
	region_id cst_rid = add_region (cst_region);
	svalue_id cst_sid = get_or_create_constant_svalue (expr);
	cst_region->set_value (*this, cst_rid, cst_sid, ctxt);
	return cst_rid;
      }
      break;

    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* A cast of an lvalue becomes a view with the cast's type.  */
	tree obj = TREE_OPERAND (expr, 0);
	return get_or_create_view (get_lvalue (obj, ctxt), TREE_TYPE (expr),
				   ctxt);
      };
      break;
    }
}
4870
4871 /* If we see a tree code we don't know how to handle, rather than
4872 ICE or generate bogus results, create a dummy region, and notify
4873 CTXT so that it can mark the new state as being not properly
4874 modelled. The exploded graph can then stop exploring that path,
4875 since any diagnostics we might issue will have questionable
4876 validity. */
4877
4878 region_id
4879 region_model::make_region_for_unexpected_tree_code (region_model_context *ctxt,
4880 tree t,
4881 const dump_location_t &loc)
4882 {
4883 gcc_assert (ctxt);
4884 region_id new_rid
4885 = add_region (new symbolic_region (m_root_rid, NULL_TREE, false));
4886 ctxt->on_unexpected_tree_code (t, loc);
4887 return new_rid;
4888 }
4889
4890 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
4891
4892 static void
4893 assert_compat_types (tree src_type, tree dst_type)
4894 {
4895 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
4896 gcc_checking_assert (useless_type_conversion_p (src_type, dst_type));
4897 }
4898
4899 /* Get the id of the region for PV within this region_model,
4900 emitting any diagnostics to CTXT. */
4901
4902 region_id
4903 region_model::get_lvalue (path_var pv, region_model_context *ctxt)
4904 {
4905 if (pv.m_tree == NULL_TREE)
4906 return region_id::null ();
4907
4908 region_id result_rid = get_lvalue_1 (pv, ctxt);
4909 assert_compat_types (get_region (result_rid)->get_type (),
4910 TREE_TYPE (pv.m_tree));
4911 return result_rid;
4912 }
4913
4914 /* Get the region_id for EXPR within this region_model (assuming the most
4915 recent stack frame if it's a local). */
4916
4917 region_id
4918 region_model::get_lvalue (tree expr, region_model_context *ctxt)
4919 {
4920 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
4921 }
4922
4923 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
4924
4925 Get the value of PV within this region_model,
4926 emitting any diagnostics to CTXT. */
4927
svalue_id
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt)
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      {
	/* Unhandled tree code: fall back to a fresh unknown value of
	   the expression's type.  */
	svalue *unknown_sval = new unknown_svalue (TREE_TYPE (pv.m_tree));
	return add_svalue (unknown_sval);
      }
      break;

    case ADDR_EXPR:
      {
	/* "&EXPR".  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	/* Addresses of functions and labels use dedicated regions.  */
	if (TREE_CODE (op0) == FUNCTION_DECL)
	  return get_svalue_for_fndecl (TREE_TYPE (expr), op0, ctxt);
	else if (TREE_CODE (op0) == LABEL_DECL)
	  return get_svalue_for_label (TREE_TYPE (expr), op0, ctxt);
	region_id expr_rid = get_lvalue (op0, ctxt);
	return get_or_create_ptr_svalue (TREE_TYPE (expr), expr_rid);
      }
      break;

    case ARRAY_REF:
      {
	region_id element_rid = get_lvalue (pv, ctxt);
	return get_region (element_rid)->get_value (*this, true, ctxt);
      }

    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
      return get_or_create_constant_svalue (pv.m_tree);

    case COMPONENT_REF:
    case MEM_REF:
    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	/* For lvalue-like codes, get the lvalue's region and read its
	   current value.  */
	region_id var_rid = get_lvalue (pv, ctxt);
	return get_region (var_rid)->get_value (*this, true, ctxt);
      }
    }
}
4979
4980 /* Get the value of PV within this region_model,
4981 emitting any diagnostics to CTXT. */
4982
4983 svalue_id
4984 region_model::get_rvalue (path_var pv, region_model_context *ctxt)
4985 {
4986 if (pv.m_tree == NULL_TREE)
4987 return svalue_id::null ();
4988 svalue_id result_sid = get_rvalue_1 (pv, ctxt);
4989
4990 assert_compat_types (get_svalue (result_sid)->get_type (),
4991 TREE_TYPE (pv.m_tree));
4992
4993 return result_sid;
4994 }
4995
4996 /* Get the value of EXPR within this region_model (assuming the most
4997 recent stack frame if it's a local). */
4998
4999 svalue_id
5000 region_model::get_rvalue (tree expr, region_model_context *ctxt)
5001 {
5002 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
5003 }
5004
5005 /* Return an svalue_id for a pointer to RID of type PTR_TYPE, reusing
5006 existing pointer values if one is available. */
5007
5008 svalue_id
5009 region_model::get_or_create_ptr_svalue (tree ptr_type, region_id rid)
5010 {
5011 /* Reuse existing region_svalue, if one of the right type is
5012 available. */
5013 /* In theory we could stash a svalue_id in "region", but differing
5014 pointer types muddles things.
5015 For now, just do a linear search through all existing svalues. */
5016 int i;
5017 svalue *svalue;
5018 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
5019 if (region_svalue *ptr_svalue = svalue->dyn_cast_region_svalue ())
5020 if (ptr_svalue->get_pointee () == rid
5021 && ptr_svalue->get_type () == ptr_type)
5022 return svalue_id::from_int (i);
5023
5024 return add_svalue (new region_svalue (ptr_type, rid));
5025 }
5026
5027 /* Return an svalue_id for a constant_svalue for CST_EXPR,
5028 creating the constant_svalue if necessary.
5029 The constant_svalue instances are reused, based on pointer equality
5030 of trees */
5031
5032 svalue_id
5033 region_model::get_or_create_constant_svalue (tree cst_expr)
5034 {
5035 gcc_assert (cst_expr);
5036
5037 /* Reuse one if it already exists. */
5038 // TODO: maybe store a map, rather than do linear search?
5039 int i;
5040 svalue *svalue;
5041 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
5042 if (svalue->maybe_get_constant () == cst_expr)
5043 return svalue_id::from_int (i);
5044
5045 svalue_id cst_sid = add_svalue (new constant_svalue (cst_expr));
5046 return cst_sid;
5047 }
5048
5049 /* Return an svalue_id for a region_svalue for FNDECL,
5050 creating the function_region if necessary. */
5051
5052 svalue_id
5053 region_model::get_svalue_for_fndecl (tree ptr_type, tree fndecl,
5054 region_model_context *ctxt)
5055 {
5056 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
5057 region_id function_rid = get_region_for_fndecl (fndecl, ctxt);
5058 return get_or_create_ptr_svalue (ptr_type, function_rid);
5059 }
5060
5061 /* Return a region_id for a function_region for FNDECL,
5062 creating it if necessary. */
5063
5064 region_id
5065 region_model::get_region_for_fndecl (tree fndecl,
5066 region_model_context *ctxt)
5067 {
5068 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
5069
5070 region_id code_rid = get_root_region ()->ensure_code_region (this);
5071 code_region *code = get_root_region ()->get_code_region (this);
5072
5073 return code->get_or_create (this, code_rid, fndecl, TREE_TYPE (fndecl),
5074 ctxt);
5075 }
5076
5077 /* Return an svalue_id for a region_svalue for LABEL,
5078 creating the label_region if necessary. */
5079
5080 svalue_id
5081 region_model::get_svalue_for_label (tree ptr_type, tree label,
5082 region_model_context *ctxt)
5083 {
5084 gcc_assert (TREE_CODE (label) == LABEL_DECL);
5085 region_id label_rid = get_region_for_label (label, ctxt);
5086 return get_or_create_ptr_svalue (ptr_type, label_rid);
5087 }
5088
5089 /* Return a region_id for a label_region for LABEL,
5090 creating it if necessary. */
5091
5092 region_id
5093 region_model::get_region_for_label (tree label,
5094 region_model_context *ctxt)
5095 {
5096 gcc_assert (TREE_CODE (label) == LABEL_DECL);
5097
5098 tree fndecl = DECL_CONTEXT (label);
5099 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
5100
5101 region_id func_rid = get_region_for_fndecl (fndecl, ctxt);
5102 function_region *func_reg = get_region <function_region> (func_rid);
5103 return func_reg->get_or_create (this, func_rid, label, TREE_TYPE (label),
5104 ctxt);
5105 }
5106
5107 /* Build a cast of SRC_EXPR to DST_TYPE, or return NULL_TREE.
5108
5109 Adapted from gcc::jit::playback::context::build_cast, which in turn is
5110 adapted from
5111 - c/c-typeck.c:build_c_cast
5112 - c/c-convert.c: convert
5113 - convert.h
5114 Only some kinds of cast are currently supported here. */
5115
static tree
build_cast (tree dst_type, tree src_expr)
{
  /* First, try any target-specific conversion hook.  */
  tree result = targetm.convert_to_type (dst_type, src_expr);
  if (result)
    return result;
  enum tree_code dst_code = TREE_CODE (dst_type);
  switch (dst_code)
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      result = convert_to_integer (dst_type, src_expr);
      goto maybe_fold;

    case BOOLEAN_TYPE:
      /* Compare with c_objc_common_truthvalue_conversion and
	 c_common_truthvalue_conversion.  */
      /* For now, convert to: (src_expr != 0)  */
      result = build2 (NE_EXPR, dst_type,
		       src_expr,
		       build_int_cst (TREE_TYPE (src_expr), 0));
      goto maybe_fold;

    case REAL_TYPE:
      result = convert_to_real (dst_type, src_expr);
      goto maybe_fold;

    case POINTER_TYPE:
      result = build1 (NOP_EXPR, dst_type, src_expr);
      goto maybe_fold;

    default:
      return NULL_TREE;

      /* Shared tail for the supported cases: attempt to fold the
	 built conversion down to a constant.  */
    maybe_fold:
      if (TREE_CODE (result) != C_MAYBE_CONST_EXPR)
	result = fold (result);
      return result;
    }
}
5156
5157 /* If the type of SID's underlying value is DST_TYPE, return SID.
5158 Otherwise, attempt to create (or reuse) an svalue representing an access
5159 of SID as a DST_TYPE and return that value's svalue_id. */
5160
svalue_id
region_model::maybe_cast_1 (tree dst_type, svalue_id sid)
{
  svalue *sval = get_svalue (sid);
  tree src_type = sval->get_type ();
  /* No-op if the type already matches.  */
  if (src_type == dst_type)
    return sid;

  /* NOTE(review): if SRC_TYPE is NULL_TREE but DST_TYPE is non-NULL and
     not a pointer, POINTER_TYPE_P (src_type) is evaluated on NULL_TREE —
     presumably callers never pass such a combination; verify.  */
  if (POINTER_TYPE_P (dst_type)
      || POINTER_TYPE_P (src_type))
    {
      /* Pointer to region.  */
      if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
	return get_or_create_ptr_svalue (dst_type, ptr_sval->get_pointee ());

      /* Unknown pointer?   Get or create a new unknown pointer of the
	 correct type, preserving the equality between the pointers.  */
      if (sval->dyn_cast_unknown_svalue ())
	{
	  equiv_class &ec = m_constraints->get_equiv_class (sid);

	  /* Look for an existing pointer of the correct type within the EC.  */
	  int i;
	  svalue_id *equiv_sid;
	  FOR_EACH_VEC_ELT (ec.m_vars, i, equiv_sid)
	    {
	      svalue *equiv_val = get_svalue (*equiv_sid);
	      if (equiv_val->get_type () == dst_type)
		return *equiv_sid;
	    }

	  /* Otherwise, create a new unknown pointer of the correct type,
	     constrained to be equal to the original.  */
	  svalue *unknown_sval = new unknown_svalue (dst_type);
	  svalue_id new_ptr_sid = add_svalue (unknown_sval);
	  m_constraints->add_constraint (sid, EQ_EXPR, new_ptr_sid);
	  return new_ptr_sid;
	}
    }

  /* Attempt to cast constants.  */
  if (tree src_cst = sval->maybe_get_constant ())
    {
      if (tree dst = build_cast (dst_type, src_cst))
	if (CONSTANT_CLASS_P (dst))
	  return get_or_create_constant_svalue (dst);
    }

  /* Otherwise, return a new unknown value.  */
  svalue *unknown_sval = new unknown_svalue (dst_type);
  return add_svalue (unknown_sval);
}
5212
5213 /* If the type of SID's underlying value is DST_TYPE, return SID.
5214 Otherwise, attempt to create (or reuse) an svalue representing an access
5215 of SID as a DST_TYPE and return that value's svalue_id.
5216
5217 If the result != SID, then call CTXT's on_cast vfunc (if CTXT is non-NULL),
5218 so that sm-state can be propagated from SID to the result. */
5219
5220 svalue_id
5221 region_model::maybe_cast (tree dst_type, svalue_id sid,
5222 region_model_context *ctxt)
5223 {
5224 svalue_id result = maybe_cast_1 (dst_type, sid);
5225 if (result != sid)
5226 if (ctxt)
5227 {
5228 /* Notify ctxt about a cast, so any sm-state can be copied. */
5229 ctxt->on_cast (sid, result);
5230 }
5231 return result;
5232 }
5233
5234 /* Ensure that the region for OBJ_RID has a child region for FIELD;
5235 return the child region's region_id. */
5236
5237 region_id
5238 region_model::get_field_region (region_id struct_or_union_rid, tree field,
5239 region_model_context *ctxt)
5240 {
5241 struct_or_union_region *sou_reg
5242 = get_region<struct_or_union_region> (struct_or_union_rid);
5243
5244 /* Inherit constness from parent type. */
5245 const int qual_mask = TYPE_QUAL_CONST;
5246 int sou_quals = TYPE_QUALS (sou_reg->get_type ()) & qual_mask;
5247 tree field_type = TREE_TYPE (field);
5248 tree field_type_with_quals = build_qualified_type (field_type, sou_quals);
5249
5250 // TODO: maybe convert to a vfunc?
5251 if (sou_reg->get_kind () == RK_UNION)
5252 {
5253 /* Union.
5254 Get a view of the union as a whole, with the type of the field. */
5255 region_id view_rid
5256 = get_or_create_view (struct_or_union_rid, field_type_with_quals, ctxt);
5257 return view_rid;
5258 }
5259 else
5260 {
5261 /* Struct. */
5262 region_id child_rid
5263 = sou_reg->get_or_create (this, struct_or_union_rid, field,
5264 field_type_with_quals, ctxt);
5265 return child_rid;
5266 }
5267 }
5268
5269 /* Get a region_id for referencing PTR_SID, creating a region if need be, and
5270 potentially generating warnings via CTXT. */
5271
region_id
region_model::deref_rvalue (svalue_id ptr_sid, region_model_context *ctxt)
{
  gcc_assert (!ptr_sid.null_p ());
  svalue *ptr_svalue = get_svalue (ptr_sid);
  gcc_assert (ptr_svalue);

  switch (ptr_svalue->get_kind ())
    {
    case SK_REGION:
      {
	/* A known pointer: simply return its pointee.  */
	region_svalue *region_sval = as_a <region_svalue *> (ptr_svalue);
	return region_sval->get_pointee ();
      }

    case SK_CONSTANT:
      /* Dereferencing a constant: treat like an unknown pointer.  */
      goto create_symbolic_region;

    case SK_POISONED:
      {
	/* Dereferencing a poisoned value (e.g. uninitialized, freed):
	   warn, then proceed as for an unknown pointer.  */
	if (ctxt)
	  if (tree ptr = get_representative_tree (ptr_sid))
	    {
	      poisoned_svalue *poisoned_sval
		= as_a <poisoned_svalue *> (ptr_svalue);
	      enum poison_kind pkind = poisoned_sval->get_poison_kind ();
	      ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
	    }
	goto create_symbolic_region;
      }

    case SK_UNKNOWN:
      {
      create_symbolic_region:
	/* We need a symbolic_region to represent this unknown region.
	   We don't know if it on the heap, stack, or a global,
	   so use the root region as parent.  */
	region_id new_rid
	  = add_region (new symbolic_region (m_root_rid, NULL_TREE, false));

	/* We need to write the region back into the pointer,
	   or we'll get a new, different region each time.
	   We do this by changing the meaning of ptr_sid, replacing
	   the unknown value with the ptr to the new region.
	   We replace the meaning of the ID rather than simply writing
	   to PTR's lvalue since there could be several places sharing
	   the same unknown ptr value.  */
	svalue *ptr_val
	  = new region_svalue (ptr_svalue->get_type (), new_rid);
	replace_svalue (ptr_sid, ptr_val);

	return new_rid;
      }

    case SK_SETJMP:
      /* Dereferencing a setjmp record: treat like an unknown pointer.  */
      goto create_symbolic_region;
    }

  gcc_unreachable ();
}
5332
5333 /* Get a region_id for referencing PTR, creating a region if need be, and
5334 potentially generating warnings via CTXT. */
5335
5336 region_id
5337 region_model::deref_rvalue (tree ptr, region_model_context *ctxt)
5338 {
5339 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
5340 return deref_rvalue (ptr_sid, ctxt);
5341 }
5342
5343 /* Set the value of the region given by LHS_RID to the value given
5344 by RHS_SID. */
5345
5346 void
5347 region_model::set_value (region_id lhs_rid, svalue_id rhs_sid,
5348 region_model_context *ctxt)
5349 {
5350 gcc_assert (!lhs_rid.null_p ());
5351 gcc_assert (!rhs_sid.null_p ());
5352 get_region (lhs_rid)->set_value (*this, lhs_rid, rhs_sid, ctxt);
5353 }
5354
5355 /* Set the value of the region given by LHS to the value given
5356 by RHS. */
5357
void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  /* Evaluate LHS as a region, then RHS as a value, then delegate to
     the (region_id, svalue_id) overload.  */
  region_id lhs_rid = get_lvalue (lhs, ctxt);
  svalue_id rhs_sid = get_rvalue (rhs, ctxt);
  gcc_assert (!lhs_rid.null_p ());
  gcc_assert (!rhs_sid.null_p ());
  set_value (lhs_rid, rhs_sid, ctxt);
}
5367
5368 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5369 this model. */
5370
5371 tristate
5372 region_model::eval_condition (svalue_id lhs_sid,
5373 enum tree_code op,
5374 svalue_id rhs_sid) const
5375 {
5376 svalue *lhs = get_svalue (lhs_sid);
5377 svalue *rhs = get_svalue (rhs_sid);
5378
5379 /* For now, make no attempt to capture constraints on floating-point
5380 values. */
5381 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
5382 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
5383 return tristate::unknown ();
5384
5385 tristate ts = eval_condition_without_cm (lhs_sid, op, rhs_sid);
5386
5387 if (ts.is_known ())
5388 return ts;
5389
5390 /* Otherwise, try constraints. */
5391 return m_constraints->eval_condition (lhs_sid, op, rhs_sid);
5392 }
5393
5394 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5395 this model, without resorting to the constraint_manager.
5396
5397 This is exposed so that impl_region_model_context::on_state_leak can
5398 check for equality part-way through region_model::purge_unused_svalues
5399 without risking creating new ECs. */
5400
tristate
region_model::eval_condition_without_cm (svalue_id lhs_sid,
					 enum tree_code op,
					 svalue_id rhs_sid) const
{
  svalue *lhs = get_svalue (lhs_sid);
  svalue *rhs = get_svalue (rhs_sid);
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* See what we know based on the values.  */
  if (lhs && rhs)
    {
      /* For now, make no attempt to capture constraints on floating-point
	 values.  */
      if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
	  || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
	return tristate::unknown ();

      if (lhs == rhs)
	{
	  /* If we have the same svalue, then we have equality
	     (apart from NaN-handling).
	     TODO: should this definitely be the case for poisoned values?  */
	  switch (op)
	    {
	    case EQ_EXPR:
	    case GE_EXPR:
	    case LE_EXPR:
	      return tristate::TS_TRUE;

	    case NE_EXPR:
	    case GT_EXPR:
	    case LT_EXPR:
	      return tristate::TS_FALSE;

	    default:
	      /* For other ops, use the logic below.  */
	      break;
	    }
	}

      /* If we have a pair of region_svalues, compare them.  */
      if (region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
	if (region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
	  {
	    tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	    if (res.is_known ())
	      return res;
	    /* Otherwise, only known through constraints.  */
	  }

      /* If we have a pair of constants, compare them.  */
      if (constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
	if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	  return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);

      /* Handle comparison of a region_svalue against zero.  */
      if (region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	  if (zerop (cst_rhs->get_constant ()))
	    {
	      /* A region_svalue is a non-NULL pointer, except in certain
		 special cases (see the comment for region::non_null_p).  */
	      region *pointee = get_region (ptr->get_pointee ());
	      if (pointee->non_null_p (*this))
		{
		  switch (op)
		    {
		    default:
		      gcc_unreachable ();

		    case EQ_EXPR:
		    case GE_EXPR:
		    case LE_EXPR:
		      return tristate::TS_FALSE;

		    case NE_EXPR:
		    case GT_EXPR:
		    case LT_EXPR:
		      return tristate::TS_TRUE;
		    }
		}
	    }
    }

  /* Nothing provable from the values alone.  */
  return tristate::TS_UNKNOWN;
}
5489
5490 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5491 If it is consistent with existing constraints, add it, and return true.
5492 Return false if it contradicts existing constraints.
5493 Use CTXT for reporting any diagnostics associated with the accesses. */
5494
bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  /* Evaluate LHS and RHS to svalues within this model; this may create
     new svalues/regions as a side-effect.  */
  svalue_id lhs_sid = get_rvalue (lhs, ctxt);
  svalue_id rhs_sid = get_rvalue (rhs, ctxt);

  tristate t_cond = eval_condition (lhs_sid, op, rhs_sid);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  /* Store the constraint.  */
  m_constraints->add_constraint (lhs_sid, op, rhs_sid);

  /* Derive any further constraints implied by the statement that
     defines LHS (e.g. boolean &&/|| flags); this can recurse back into
     add_constraint.  Done after storing, so the recursion sees the new
     constraint.  */
  add_any_constraints_from_ssa_def_stmt (lhs, op, rhs, ctxt);

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  return true;
}
5532
5533 /* Subroutine of region_model::add_constraint for handling optimized
5534 && and || conditionals.
5535
5536 If we have an SSA_NAME for a boolean compared against 0,
5537 look at anything implied by the def stmt and call add_constraint
5538 for it (which could recurse).
5539
5540 For example, if we have
5541 _1 = p_6 == 0B;
5542 _2 = p_8 == 0B
5543 _3 = _1 | _2
5544 and add the constraint
5545 (_3 == 0),
5546 then the def stmt for _3 implies that _1 and _2 are both false,
5547 and hence we can add the constraints:
5548 p_6 != 0B
5549 p_8 != 0B. */
5550
5551 void
5552 region_model::add_any_constraints_from_ssa_def_stmt (tree lhs,
5553 enum tree_code op,
5554 tree rhs,
5555 region_model_context *ctxt)
5556 {
5557 if (TREE_CODE (lhs) != SSA_NAME)
5558 return;
5559
5560 if (!zerop (rhs))
5561 return;
5562
5563 if (op != NE_EXPR && op != EQ_EXPR)
5564 return;
5565
5566 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
5567 if (const gassign *assign = dyn_cast<gassign *> (def_stmt))
5568 add_any_constraints_from_gassign (op, rhs, assign, ctxt);
5569 else if (gcall *call = dyn_cast<gcall *> (def_stmt))
5570 add_any_constraints_from_gcall (op, rhs, call, ctxt);
5571 }
5572
5573 /* Add any constraints for an SSA_NAME defined by ASSIGN
5574 where the result OP RHS. */
5575
5576 void
5577 region_model::add_any_constraints_from_gassign (enum tree_code op,
5578 tree rhs,
5579 const gassign *assign,
5580 region_model_context *ctxt)
5581 {
5582 /* We have either
5583 - "LHS != false" (i.e. LHS is true), or
5584 - "LHS == false" (i.e. LHS is false). */
5585 bool is_true = op == NE_EXPR;
5586
5587 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
5588
5589 switch (rhs_code)
5590 {
5591 default:
5592 break;
5593
5594 case NOP_EXPR:
5595 {
5596 add_constraint (gimple_assign_rhs1 (assign), op, rhs, ctxt);
5597 }
5598 break;
5599
5600 case BIT_AND_EXPR:
5601 {
5602 if (is_true)
5603 {
5604 /* ...and "LHS == (rhs1 & rhs2) i.e. "(rhs1 & rhs2)" is true
5605 then both rhs1 and rhs2 must be true. */
5606 tree rhs1 = gimple_assign_rhs1 (assign);
5607 tree rhs2 = gimple_assign_rhs2 (assign);
5608 add_constraint (rhs1, NE_EXPR, boolean_false_node, ctxt);
5609 add_constraint (rhs2, NE_EXPR, boolean_false_node, ctxt);
5610 }
5611 }
5612 break;
5613
5614 case BIT_IOR_EXPR:
5615 {
5616 if (!is_true)
5617 {
5618 /* ...and "LHS == (rhs1 | rhs2)
5619 i.e. "(rhs1 | rhs2)" is false
5620 then both rhs1 and rhs2 must be false. */
5621 tree rhs1 = gimple_assign_rhs1 (assign);
5622 tree rhs2 = gimple_assign_rhs2 (assign);
5623 add_constraint (rhs1, EQ_EXPR, boolean_false_node, ctxt);
5624 add_constraint (rhs2, EQ_EXPR, boolean_false_node, ctxt);
5625 }
5626 }
5627 break;
5628
5629 case EQ_EXPR:
5630 case NE_EXPR:
5631 {
5632 /* ...and "LHS == (rhs1 OP rhs2)"
5633 then rhs1 OP rhs2 must have the same logical value as LHS. */
5634 tree rhs1 = gimple_assign_rhs1 (assign);
5635 tree rhs2 = gimple_assign_rhs2 (assign);
5636 if (!is_true)
5637 rhs_code
5638 = invert_tree_comparison (rhs_code, false /* honor_nans */);
5639 add_constraint (rhs1, rhs_code, rhs2, ctxt);
5640 }
5641 break;
5642 }
5643 }
5644
5645 /* Add any constraints for an SSA_NAME defined by CALL
5646 where the result OP RHS. */
5647
5648 void
5649 region_model::add_any_constraints_from_gcall (enum tree_code op,
5650 tree rhs,
5651 const gcall *call,
5652 region_model_context *ctxt)
5653 {
5654 if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
5655 || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
5656 || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
5657 {
5658 /* __builtin_expect's return value is its initial argument. */
5659 add_constraint (gimple_call_arg (call, 0), op, rhs, ctxt);
5660 }
5661 }
5662
5663 /* Determine what is known about the condition "LHS OP RHS" within
5664 this model.
5665 Use CTXT for reporting any diagnostics associated with the accesses. */
5666
5667 tristate
5668 region_model::eval_condition (tree lhs,
5669 enum tree_code op,
5670 tree rhs,
5671 region_model_context *ctxt)
5672 {
5673 /* For now, make no attempt to model constraints on floating-point
5674 values. */
5675 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5676 return tristate::unknown ();
5677
5678 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
5679 }
5680
5681 /* If SID is a constant value, return the underlying tree constant.
5682 Otherwise, return NULL_TREE. */
5683
5684 tree
5685 region_model::maybe_get_constant (svalue_id sid) const
5686 {
5687 gcc_assert (!sid.null_p ());
5688 svalue *sval = get_svalue (sid);
5689 return sval->maybe_get_constant ();
5690 }
5691
5692 /* Create a new child region of the heap (creating the heap region if
5693 necessary).
5694 Return the region_id of the new child region. */
5695
5696 region_id
5697 region_model::add_new_malloc_region ()
5698 {
5699 region_id heap_rid
5700 = get_root_region ()->ensure_heap_region (this);
5701 return add_region (new symbolic_region (heap_rid, NULL_TREE, true));
5702 }
5703
5704 /* Attempt to return a tree that represents SID, or return NULL_TREE. */
5705
5706 tree
5707 region_model::get_representative_tree (svalue_id sid) const
5708 {
5709 if (sid.null_p ())
5710 return NULL_TREE;
5711
5712 /* Find the first region that stores the value (e.g. a local) and
5713 generate a representative tree for it. */
5714 unsigned i;
5715 region *region;
5716 FOR_EACH_VEC_ELT (m_regions, i, region)
5717 if (sid == region->get_value_direct ())
5718 {
5719 path_var pv = get_representative_path_var (region_id::from_int (i));
5720 if (pv.m_tree)
5721 return pv.m_tree;
5722 }
5723
5724 /* Handle string literals and various other pointers. */
5725 svalue *sval = get_svalue (sid);
5726 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5727 {
5728 region_id rid = ptr_sval->get_pointee ();
5729 path_var pv = get_representative_path_var (rid);
5730 if (pv.m_tree)
5731 return build1 (ADDR_EXPR,
5732 TREE_TYPE (sval->get_type ()),
5733 pv.m_tree);
5734 }
5735
5736 return maybe_get_constant (sid);
5737 }
5738
5739 /* Attempt to return a path_var that represents the region, or return
5740 the NULL path_var.
5741 For example, a region for a field of a local would be a path_var
5742 wrapping a COMPONENT_REF. */
5743
path_var
region_model::get_representative_path_var (region_id rid) const
{
  region *reg = get_region (rid);
  region *parent_reg = get_region (reg->get_parent ());
  region_id stack_rid = get_stack_region_id ();
  /* Case: a direct child of a frame on the stack (e.g. a local or a
     parameter); the frame knows the decl for each of its children.  */
  if (!stack_rid.null_p ())
    if (parent_reg && parent_reg->get_parent () == stack_rid)
      {
	frame_region *parent_frame = (frame_region *)parent_reg;
	tree t = parent_frame->get_tree_for_child_region (rid);
	return path_var (t, parent_frame->get_depth ());
      }
  /* Case: a global variable; stack depth -1 marks "not on the stack".  */
  if (reg->get_parent () == get_globals_region_id ())
    {
      map_region *globals = get_root_region ()->get_globals_region (this);
      if (globals)
	return path_var (globals->get_tree_for_child_region (rid), -1);
    }

  /* Handle e.g. fields of a local by recursing.  */
  region_id parent_rid = reg->get_parent ();
  if (parent_reg)
    {
      /* A view of another region: wrap the parent's representative
	 tree in a cast to the view's type.  */
      if (reg->is_view_p ())
	{
	  path_var parent_pv = get_representative_path_var (parent_rid);
	  if (parent_pv.m_tree && reg->get_type ())
	    return path_var (build1 (NOP_EXPR,
				     reg->get_type (),
				     parent_pv.m_tree),
			     parent_pv.m_stack_depth);
	}

      /* A field within a struct: build a COMPONENT_REF.  */
      if (parent_reg->get_kind () == RK_STRUCT)
	{
	  map_region *parent_map_region = (map_region *)parent_reg;
	  /* This can fail if we have a view, rather than a field.  */
	  if (tree child_key
	      = parent_map_region->get_tree_for_child_region (rid))
	    {
	      path_var parent_pv = get_representative_path_var (parent_rid);
	      if (parent_pv.m_tree && TREE_CODE (child_key) == FIELD_DECL)
		return path_var (build3 (COMPONENT_REF,
					 TREE_TYPE (child_key),
					 parent_pv.m_tree, child_key,
					 NULL_TREE),
				 parent_pv.m_stack_depth);
	    }
	}

      /* Handle elements within an array: build an ARRAY_REF.  */
      if (array_region *array_reg = parent_reg->dyn_cast_array_region ())
	{
	  array_region::key_t key;
	  if (array_reg->get_key_for_child_region (rid, &key))
	    {
	      path_var parent_pv = get_representative_path_var (parent_rid);
	      if (parent_pv.m_tree && reg->get_type ())
		{
		  tree index = array_reg->constant_from_key (key);
		  return path_var (build4 (ARRAY_REF,
					   reg->get_type (),
					   parent_pv.m_tree, index,
					   NULL_TREE, NULL_TREE),
				   parent_pv.m_stack_depth);
		}
	    }
	}
    }

  /* Handle string literals.  */
  svalue_id sid = reg->get_value_direct ();
  if (svalue *sval = get_svalue (sid))
    if (tree cst = sval->maybe_get_constant ())
      if (TREE_CODE (cst) == STRING_CST)
	return path_var (cst, 0);

  /* Give up: no known way of naming this region.  */
  return path_var (NULL_TREE, 0);
}
5824
5825 /* Locate all regions that directly have value SID and append representative
5826 path_var instances for them into *OUT. */
5827
5828 void
5829 region_model::get_path_vars_for_svalue (svalue_id sid, vec<path_var> *out) const
5830 {
5831 unsigned i;
5832 region *region;
5833 FOR_EACH_VEC_ELT (m_regions, i, region)
5834 if (sid == region->get_value_direct ())
5835 {
5836 path_var pv = get_representative_path_var (region_id::from_int (i));
5837 if (pv.m_tree)
5838 out->safe_push (pv);
5839 }
5840 }
5841
5842 /* Set DST_RID value to be a new unknown value of type TYPE. */
5843
5844 svalue_id
5845 region_model::set_to_new_unknown_value (region_id dst_rid, tree type,
5846 region_model_context *ctxt)
5847 {
5848 gcc_assert (!dst_rid.null_p ());
5849 svalue_id new_sid = add_svalue (new unknown_svalue (type));
5850 set_value (dst_rid, new_sid, ctxt);
5851
5852 // TODO: presumably purge all child regions too (but do this in set_value?)
5853
5854 return new_sid;
5855 }
5856
5857 /* Update this model for any phis in SNODE, assuming we came from
5858 LAST_CFG_SUPEREDGE. */
5859
5860 void
5861 region_model::update_for_phis (const supernode *snode,
5862 const cfg_superedge *last_cfg_superedge,
5863 region_model_context *ctxt)
5864 {
5865 gcc_assert (last_cfg_superedge);
5866
5867 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
5868 !gsi_end_p (gpi); gsi_next (&gpi))
5869 {
5870 gphi *phi = gpi.phi ();
5871
5872 tree src = last_cfg_superedge->get_phi_arg (phi);
5873 tree lhs = gimple_phi_result (phi);
5874
5875 /* Update next_state based on phi. */
5876 bool is_back_edge = last_cfg_superedge->back_edge_p ();
5877 handle_phi (phi, lhs, src, is_back_edge, ctxt);
5878 }
5879 }
5880
5881 /* Attempt to update this model for taking EDGE (where the last statement
5882 was LAST_STMT), returning true if the edge can be taken, false
5883 otherwise.
5884
5885 For CFG superedges where LAST_STMT is a conditional or a switch
5886 statement, attempt to add the relevant conditions for EDGE to this
5887 model, returning true if they are feasible, or false if they are
5888 impossible.
5889
5890 For call superedges, push frame information and store arguments
5891 into parameters.
5892
5893 For return superedges, pop frame information and store return
5894 values into any lhs.
5895
5896 Rejection of call/return superedges happens elsewhere, in
5897 program_point::on_edge (i.e. based on program point, rather
5898 than program state). */
5899
bool
region_model::maybe_update_for_edge (const superedge &edge,
				     const gimple *last_stmt,
				     region_model_context *ctxt)
{
  /* Handle frame updates for interprocedural edges.  */
  switch (edge.m_kind)
    {
    default:
      break;

    case SUPEREDGE_CALL:
      {
	/* Entering a function: push a frame and populate the
	   parameters from the call's arguments.  */
	const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
	update_for_call_superedge (*call_edge, ctxt);
      }
      break;

    case SUPEREDGE_RETURN:
      {
	/* Leaving a function: pop the frame, storing any return
	   value.  */
	const return_superedge *return_edge
	  = as_a <const return_superedge *> (&edge);
	update_for_return_superedge (*return_edge, ctxt);
      }
      break;

    case SUPEREDGE_INTRAPROCEDURAL_CALL:
      {
	/* A call modeled via a summary rather than by pushing and
	   popping a frame.  */
	const callgraph_superedge *cg_sedge
	  = as_a <const callgraph_superedge *> (&edge);
	update_for_call_summary (*cg_sedge, ctxt);
      }
      break;
    }

  if (last_stmt == NULL)
    return true;

  /* Apply any constraints for conditionals/switch statements.  */

  if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
    {
      const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
      return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt);
    }

  if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
    {
      const switch_cfg_superedge *switch_sedge
	= as_a <const switch_cfg_superedge *> (&edge);
      return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, ctxt);
    }

  /* Any other kind of edge is unconditionally feasible.  */
  return true;
}
5955
5956 /* Push a new frame_region on to the stack region.
5957 Populate the frame_region with child regions for the function call's
5958 parameters, using values from the arguments at the callsite in the
5959 caller's frame. */
5960
5961 void
5962 region_model::update_for_call_superedge (const call_superedge &call_edge,
5963 region_model_context *ctxt)
5964 {
5965 /* Build a vec of argument svalue_id, using the current top
5966 frame for resolving tree expressions. */
5967 const gcall *call_stmt = call_edge.get_call_stmt ();
5968 auto_vec<svalue_id> arg_sids (gimple_call_num_args (call_stmt));
5969
5970 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5971 {
5972 tree arg = gimple_call_arg (call_stmt, i);
5973 arg_sids.quick_push (get_rvalue (arg, ctxt));
5974 }
5975
5976 push_frame (call_edge.get_callee_function (), &arg_sids, ctxt);
5977 }
5978
5979 /* Pop the top-most frame_region from the stack, and store the svalue
5980 for any returned value into the region for the lvalue of the LHS of
5981 the call (if any). */
5982
void
region_model::update_for_return_superedge (const return_superedge &return_edge,
					   region_model_context *ctxt)
{
  /* Pop the callee's frame, purging unused svalues, but preserving
     the svalue for the returned value (if any) so it can be stored
     below.  */
  purge_stats stats;
  svalue_id result_sid = pop_frame (true, &stats, ctxt);
  // TODO: do something with the stats?

  /* A void call (or a return without a value): nothing to store.  */
  if (result_sid.null_p ())
    return;

  /* Set the result of the call, within the caller frame.  */
  const gcall *call_stmt = return_edge.get_call_stmt ();
  tree lhs = gimple_call_lhs (call_stmt);
  if (lhs)
    set_value (get_lvalue (lhs, ctxt), result_sid, ctxt);
  else
    {
      /* This could be a leak; try purging again, but this time,
	 don't special-case the result_sid.  */
      purge_stats stats;
      purge_unused_svalues (&stats, ctxt);
    }
}
6007
6008 /* Update this region_model with a summary of the effect of calling
6009 and returning from CG_SEDGE.
6010
6011 TODO: Currently this is extremely simplistic: we merely set the
6012 return value to "unknown". A proper implementation would e.g. update
6013 sm-state, and presumably be reworked to support multiple outcomes. */
6014
6015 void
6016 region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
6017 region_model_context *ctxt)
6018 {
6019 /* For now, set any return value to "unknown". */
6020 const gcall *call_stmt = cg_sedge.get_call_stmt ();
6021 tree lhs = gimple_call_lhs (call_stmt);
6022 if (lhs)
6023 set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt);
6024
6025 // TODO: actually implement some kind of summary here
6026 }
6027
6028 /* Given a true or false edge guarded by conditional statement COND_STMT,
6029 determine appropriate constraints for the edge to be taken.
6030
6031 If they are feasible, add the constraints and return true.
6032
6033 Return false if the constraints contradict existing knowledge
6034 (and so the edge should not be taken). */
6035
6036 bool
6037 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
6038 const gcond *cond_stmt,
6039 region_model_context *ctxt)
6040 {
6041 ::edge cfg_edge = sedge.get_cfg_edge ();
6042 gcc_assert (cfg_edge != NULL);
6043 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
6044
6045 enum tree_code op = gimple_cond_code (cond_stmt);
6046 tree lhs = gimple_cond_lhs (cond_stmt);
6047 tree rhs = gimple_cond_rhs (cond_stmt);
6048 if (cfg_edge->flags & EDGE_FALSE_VALUE)
6049 op = invert_tree_comparison (op, false /* honor_nans */);
6050 return add_constraint (lhs, op, rhs, ctxt);
6051 }
6052
6053 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
6054 for the edge to be taken.
6055
6056 If they are feasible, add the constraints and return true.
6057
6058 Return false if the constraints contradict existing knowledge
6059 (and so the edge should not be taken). */
6060
bool
region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
					     const gswitch *switch_stmt,
					     region_model_context *ctxt)
{
  tree index = gimple_switch_index (switch_stmt);
  tree case_label = edge.get_case_label ();
  gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR);
  tree lower_bound = CASE_LOW (case_label);
  tree upper_bound = CASE_HIGH (case_label);
  if (lower_bound)
    {
      if (upper_bound)
	{
	  /* Range: LOWER_BOUND <= INDEX <= UPPER_BOUND.  */
	  if (!add_constraint (index, GE_EXPR, lower_bound, ctxt))
	    return false;
	  return add_constraint (index, LE_EXPR, upper_bound, ctxt);
	}
      else
	/* Single-value.  */
	return add_constraint (index, EQ_EXPR, lower_bound, ctxt);
    }
  else
    {
      /* The default case (no CASE_LOW).
	 Add exclusions based on the other cases; start at label 1,
	 since label 0 is the default case itself.  */
      for (unsigned other_idx = 1;
	   other_idx < gimple_switch_num_labels (switch_stmt);
	   other_idx++)
	{
	  tree other_label = gimple_switch_label (switch_stmt,
						  other_idx);
	  tree other_lower_bound = CASE_LOW (other_label);
	  tree other_upper_bound = CASE_HIGH (other_label);
	  /* Non-default labels always have a CASE_LOW.  */
	  gcc_assert (other_lower_bound);
	  if (other_upper_bound)
	    {
	      /* Exclude this range-valued case.
		 For now, we just exclude the boundary values.
		 TODO: exclude the values within the region.  */
	      if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
		return false;
	      if (!add_constraint (index, NE_EXPR, other_upper_bound, ctxt))
		return false;
	    }
	  else
	    /* Exclude this single-valued case.  */
	    if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
	      return false;
	}
      return true;
    }
}
6115
6116 /* Get the root_region within this model (guaranteed to be non-null). */
6117
6118 root_region *
6119 region_model::get_root_region () const
6120 {
6121 return get_region<root_region> (m_root_rid);
6122 }
6123
6124 /* Get the region_id of this model's stack region (if any). */
6125
6126 region_id
6127 region_model::get_stack_region_id () const
6128 {
6129 return get_root_region ()->get_stack_region_id ();
6130 }
6131
6132 /* Create a new frame_region for a call to FUN and push it onto
6133 the stack.
6134
6135 If ARG_SIDS is non-NULL, use it to populate the parameters
6136 in the new frame.
6137 Otherwise, populate them with unknown values.
6138
6139 Return the region_id of the new frame_region. */
6140
6141 region_id
6142 region_model::push_frame (function *fun, vec<svalue_id> *arg_sids,
6143 region_model_context *ctxt)
6144 {
6145 return get_root_region ()->push_frame (this, fun, arg_sids, ctxt);
6146 }
6147
6148 /* Get the region_id of the top-most frame in this region_model's stack,
6149 if any. */
6150
6151 region_id
6152 region_model::get_current_frame_id () const
6153 {
6154 return get_root_region ()->get_current_frame_id (*this);
6155 }
6156
6157 /* Get the function of the top-most frame in this region_model's stack.
6158 There must be such a frame. */
6159
6160 function *
6161 region_model::get_current_function () const
6162 {
6163 region_id frame_id = get_current_frame_id ();
6164 frame_region *frame = get_region<frame_region> (frame_id);
6165 return frame->get_function ();
6166 }
6167
6168 /* Pop the topmost frame_region from this region_model's stack;
6169 see the comment for stack_region::pop_frame. */
6170
6171 svalue_id
6172 region_model::pop_frame (bool purge, purge_stats *out,
6173 region_model_context *ctxt)
6174 {
6175 return get_root_region ()->pop_frame (this, purge, out, ctxt);
6176 }
6177
6178 /* Get the number of frames in this region_model's stack. */
6179
6180 int
6181 region_model::get_stack_depth () const
6182 {
6183 stack_region *stack = get_root_region ()->get_stack_region (this);
6184 if (stack)
6185 return stack->get_num_frames ();
6186 else
6187 return 0;
6188 }
6189
6190 /* Get the function * at DEPTH within the call stack. */
6191
6192 function *
6193 region_model::get_function_at_depth (unsigned depth) const
6194 {
6195 stack_region *stack = get_root_region ()->get_stack_region (this);
6196 gcc_assert (stack);
6197 region_id frame_rid = stack->get_frame_rid (depth);
6198 frame_region *frame = get_region <frame_region> (frame_rid);
6199 return frame->get_function ();
6200 }
6201
6202 /* Get the region_id of this model's globals region (if any). */
6203
6204 region_id
6205 region_model::get_globals_region_id () const
6206 {
6207 return get_root_region ()->get_globals_region_id ();
6208 }
6209
6210 /* Add SVAL to this model, taking ownership, and returning its new
6211 svalue_id. */
6212
6213 svalue_id
6214 region_model::add_svalue (svalue *sval)
6215 {
6216 gcc_assert (sval);
6217 m_svalues.safe_push (sval);
6218 return svalue_id::from_int (m_svalues.length () - 1);
6219 }
6220
6221 /* Change the meaning of SID to be NEW_SVAL
6222 (e.g. when deferencing an unknown pointer, the pointer
6223 becomes a pointer to a symbolic region, so that all users
6224 of the former unknown pointer are now effectively pointing
6225 at the same region). */
6226
6227 void
6228 region_model::replace_svalue (svalue_id sid, svalue *new_sval)
6229 {
6230 gcc_assert (!sid.null_p ());
6231 int idx = sid.as_int ();
6232
6233 gcc_assert (m_svalues[idx]);
6234 gcc_assert (m_svalues[idx]->get_type () == new_sval->get_type ());
6235 delete m_svalues[idx];
6236
6237 m_svalues[idx] = new_sval;
6238 }
6239
6240 /* Add region R to this model, taking ownership, and returning its new
6241 region_id. */
6242
6243 region_id
6244 region_model::add_region (region *r)
6245 {
6246 gcc_assert (r);
6247 m_regions.safe_push (r);
6248 return region_id::from_int (m_regions.length () - 1);
6249 }
6250
6251 /* Return the svalue with id SVAL_ID, or NULL for a null id. */
6252
6253 svalue *
6254 region_model::get_svalue (svalue_id sval_id) const
6255 {
6256 if (sval_id.null_p ())
6257 return NULL;
6258 return m_svalues[sval_id.as_int ()];
6259 }
6260
6261 /* Return the region with id RID, or NULL for a null id. */
6262
6263 region *
6264 region_model::get_region (region_id rid) const
6265 {
6266 if (rid.null_p ())
6267 return NULL;
6268 return m_regions[rid.as_int ()];
6269 }
6270
6271 /* Make a region of an appropriate subclass for TYPE,
6272 with parent PARENT_RID, or return NULL for types we don't yet know
6273 how to handle. */
6274
6275 static region *
6276 make_region_for_type (region_id parent_rid, tree type)
6277 {
6278 gcc_assert (TYPE_P (type));
6279
6280 if (INTEGRAL_TYPE_P (type)
6281 || SCALAR_FLOAT_TYPE_P (type)
6282 || POINTER_TYPE_P (type)
6283 || TREE_CODE (type) == COMPLEX_TYPE
6284 || TREE_CODE (type) == VECTOR_TYPE)
6285 return new primitive_region (parent_rid, type);
6286
6287 if (TREE_CODE (type) == RECORD_TYPE)
6288 return new struct_region (parent_rid, type);
6289
6290 if (TREE_CODE (type) == ARRAY_TYPE)
6291 return new array_region (parent_rid, type);
6292
6293 if (TREE_CODE (type) == UNION_TYPE)
6294 return new union_region (parent_rid, type);
6295
6296 if (FUNC_OR_METHOD_TYPE_P (type))
6297 return new function_region (parent_rid, type);
6298
6299 /* If we have a void *, make a new symbolic region. */
6300 if (VOID_TYPE_P (type))
6301 return new symbolic_region (parent_rid, type, false);
6302
6303 return NULL;
6304 }
6305
6306 /* Add a region with type TYPE and parent PARENT_RID. */
6307
6308 region_id
6309 region_model::add_region_for_type (region_id parent_rid, tree type,
6310 region_model_context *ctxt)
6311 {
6312 gcc_assert (TYPE_P (type));
6313
6314 if (region *new_region = make_region_for_type (parent_rid, type))
6315 return add_region (new_region);
6316
6317 /* If we can't handle TYPE, return a placeholder region, and stop
6318 exploring this path. */
6319 return make_region_for_unexpected_tree_code (ctxt, type,
6320 dump_location_t ());
6321 }
6322
6323 /* Helper class for region_model::purge_unused_svalues. */
6324
6325 class restrict_to_used_svalues : public purge_criteria
6326 {
6327 public:
6328 restrict_to_used_svalues (const auto_sbitmap &used) : m_used (used) {}
6329
6330 bool should_purge_p (svalue_id sid) const FINAL OVERRIDE
6331 {
6332 gcc_assert (!sid.null_p ());
6333 return !bitmap_bit_p (m_used, sid.as_int ());
6334 }
6335
6336 private:
6337 const auto_sbitmap &m_used;
6338 };
6339
6340 /* Remove unused svalues from this model, accumulating stats into STATS.
6341 Unused svalues are deleted. Doing so could reorder the svalues, and
6342 thus change the meaning of svalue_ids.
6343
6344 If CTXT is non-NULL, then it is notified about svalue_id remappings,
6345 and about svalue_ids that are about to be deleted. This allows e.g.
6346 for warning about resource leaks, for the case where the svalue
6347 represents a resource handle in the user code (e.g. a FILE * or a malloc
6348 buffer).
6349
6350 Amongst other things, removing unused svalues is important for ensuring
6351 that the analysis of loops terminates. Otherwise, we could generate a
6352 succession of models with unreferenced "unknown" values, where the
6353 number of redundant unknown values could grow without bounds, and each
6354 such model would be treated as distinct.
6355
6356 If KNOWN_USED is non-NULL, treat *KNOWN_USED as used (this is for
6357 handling values being returned from functions as their frame is popped,
6358 since otherwise we'd have to simultaneously determine both the rvalue
6359 of the return expr in the callee frame and the lvalue for the gcall's
6360 assignment in the caller frame, and it seems cleaner to express all
6361 lvalue and rvalue lookups implicitly relative to a "current" frame). */
6362
void
region_model::purge_unused_svalues (purge_stats *stats,
				    region_model_context *ctxt,
				    svalue_id *known_used_sid)
{
  // TODO: might want to avoid a vfunc call just to do logging here:
  logger *logger = ctxt ? ctxt->get_logger () : NULL;

  LOG_SCOPE (logger);

  /* Bitmap of svalue ids (indexed by int) that must be kept.  */
  auto_sbitmap used (m_svalues.length ());
  bitmap_clear (used);

  if (known_used_sid)
    if (!known_used_sid->null_p ())
      bitmap_set_bit (used, known_used_sid->as_int ());

  /* Walk the regions, marking sids that are used.  */
  unsigned i;
  region *r;
  FOR_EACH_VEC_ELT (m_regions, i, r)
    {
      svalue_id sid = r->get_value_direct ();
      if (!sid.null_p ())
	bitmap_set_bit (used, sid.as_int ());
    }

  /* Now purge any constraints involving svalues we don't care about.  */
  restrict_to_used_svalues criterion (used);
  m_constraints->purge (criterion, stats);

  /* Mark any sids that are in constraints that survived.  */
  {
    equiv_class *ec;
    FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec)
      {
	int j;
	svalue_id *sid;
	FOR_EACH_VEC_ELT (ec->m_vars, j, sid)
	  {
	    gcc_assert (!sid->null_p ());
	    bitmap_set_bit (used, sid->as_int ());
	  }
      }
  }

  /* Build a mapping from old-sid to new-sid so that we can preserve
     order of the used IDs and move all redundant ones to the end.
     Iterate though svalue IDs, adding used ones to the front of
     the new list, and unused ones to the back.  */
  svalue_id_map map (m_svalues.length ());
  int next_used_new_sid = 0;
  int after_next_unused_new_sid = m_svalues.length ();
  for (unsigned i = 0; i < m_svalues.length (); i++)
    {
      svalue_id src (svalue_id::from_int (i));
      if (bitmap_bit_p (used, i))
	{
	  if (logger)
	    logger->log ("sv%i is used", i);
	  map.put (src, svalue_id::from_int (next_used_new_sid++));
	}
      else
	{
	  if (logger)
	    logger->log ("sv%i is unused", i);
	  map.put (src, svalue_id::from_int (--after_next_unused_new_sid));
	}
    }
  /* The two insertion points should have met.  */
  gcc_assert (next_used_new_sid == after_next_unused_new_sid);

  /* Now walk the regions and the constraints, remapping sids,
     so that all the redundant svalues are at the end.  */
  remap_svalue_ids (map);

  if (logger)
    {
      logger->start_log_line ();
      logger->log_partial ("map: ");
      map.dump_to_pp (logger->get_printer ());
      logger->end_log_line ();
    }

  /* Notify any client about the remapping and pending deletion.
     Potentially this could trigger leak warnings.
     After the remapping, [next_used_new_sid, length) are exactly the
     svalues about to be deleted, hence that id is the boundary passed
     to the client.  */
  if (ctxt)
    {
      ctxt->remap_svalue_ids (map);
      int num_client_items_purged
	= ctxt->on_svalue_purge (svalue_id::from_int (next_used_new_sid), map);
      if (stats)
	stats->m_num_client_items += num_client_items_purged;
    }

  /* Drop the redundant svalues from the end of the vector.  */
  while ((signed)m_svalues.length () > next_used_new_sid)
    {
      if (logger)
	{
	  svalue_id victim = svalue_id::from_int (m_svalues.length () - 1);
	  logger->log ("deleting sv%i (was sv%i)",
		       victim.as_int (),
		       map.get_src_for_dst (victim).as_int ());
	}
      delete m_svalues.pop ();
      if (stats)
	stats->m_num_svalues++;
    }

  /* Let the caller see the remapped id of the value it asked us to
     preserve.  */
  if (known_used_sid)
    map.update (known_used_sid);

  validate ();
}
6478
6479 /* Renumber the svalues within this model according to MAP. */
6480
6481 void
6482 region_model::remap_svalue_ids (const svalue_id_map &map)
6483 {
6484 /* Update IDs within regions. */
6485 unsigned i;
6486 region *r;
6487 FOR_EACH_VEC_ELT (m_regions, i, r)
6488 r->remap_svalue_ids (map);
6489
6490 /* Update IDs within ECs within constraints. */
6491 m_constraints->remap_svalue_ids (map);
6492
6493 /* Build a reordered svalues vector. */
6494 auto_vec<svalue *> new_svalues (m_svalues.length ());
6495 for (unsigned i = 0; i < m_svalues.length (); i++)
6496 {
6497 svalue_id dst (svalue_id::from_int (i));
6498 svalue_id src = map.get_src_for_dst (dst);
6499 new_svalues.quick_push (get_svalue (src));
6500 }
6501
6502 /* Copy over the reordered vec to m_svalues. */
6503 m_svalues.truncate (0);
6504 gcc_assert (m_svalues.space (new_svalues.length ()));
6505 svalue *sval;
6506 FOR_EACH_VEC_ELT (new_svalues, i, sval)
6507 m_svalues.quick_push (sval);
6508 }
6509
6510 /* Renumber the regions within this model according to MAP. */
6511
6512 void
6513 region_model::remap_region_ids (const region_id_map &map)
6514 {
6515 /* Update IDs within regions. */
6516 unsigned i;
6517 region *r;
6518 FOR_EACH_VEC_ELT (m_regions, i, r)
6519 r->remap_region_ids (map);
6520
6521 /* Update IDs within svalues. */
6522 svalue *sval;
6523 FOR_EACH_VEC_ELT (m_svalues, i, sval)
6524 sval->remap_region_ids (map);
6525
6526 /* Build a reordered regions vector. */
6527 auto_vec<region *> new_regions (m_regions.length ());
6528 for (unsigned i = 0; i < m_regions.length (); i++)
6529 {
6530 region_id dst (region_id::from_int (i));
6531 region_id src = map.get_src_for_dst (dst);
6532 new_regions.quick_push (get_region (src));
6533 }
6534
6535 /* Copy over the reordered vec to m_regions. */
6536 m_regions.truncate (0);
6537 gcc_assert (m_regions.space (new_regions.length ()));
6538 FOR_EACH_VEC_ELT (new_regions, i, r)
6539 m_regions.quick_push (r);
6540 }
6541
/* Delete all regions within SET_TO_PURGE, remapping region IDs for
   other regions.  It's required that there are no uses of the
   regions within the set (or the region IDs will become invalid).

   Accumulate stats to STATS.  The logger parameter is currently
   unused.  */

void
region_model::purge_regions (const region_id_set &set_to_purge,
			     purge_stats *stats,
			     logger *)
{
  /* Build a mapping from old-rid to new-rid so that we can preserve
     order of the used IDs and move all redundant ones to the end.
     Iterate though region IDs, adding used ones to the front of
     the new list, and unused ones to the back.  */
  region_id_map map (m_regions.length ());
  int next_used_new_rid = 0;
  int after_next_unused_new_rid = m_regions.length ();
  for (unsigned i = 0; i < m_regions.length (); i++)
    {
      region_id src (region_id::from_int (i));
      if (set_to_purge.region_p (src))
	/* Purged regions are numbered downwards from the end.  */
	map.put (src, region_id::from_int (--after_next_unused_new_rid));
      else
	/* Surviving regions are numbered upwards from zero,
	   preserving their relative order.  */
	map.put (src, region_id::from_int (next_used_new_rid++));
    }
  /* The two insertion points should have met.  */
  gcc_assert (next_used_new_rid == after_next_unused_new_rid);

  /* Now walk the regions and svalues, remapping rids,
     so that all the redundant regions are at the end.  */
  remap_region_ids (map);

  /* Drop the redundant regions from the end of the vector.  */
  while ((signed)m_regions.length () > next_used_new_rid)
    {
      delete m_regions.pop ();
      if (stats)
	stats->m_num_regions++;
    }
}
6583
6584 /* Populate *OUT with RID and all of its descendents.
6585 If EXCLUDE_RID is non-null, then don't add it or its descendents. */
6586
6587 void
6588 region_model::get_descendents (region_id rid, region_id_set *out,
6589 region_id exclude_rid) const
6590 {
6591 out->add_region (rid);
6592
6593 bool changed = true;
6594 while (changed)
6595 {
6596 changed = false;
6597 unsigned i;
6598 region *r;
6599 FOR_EACH_VEC_ELT (m_regions, i, r)
6600 {
6601 region_id iter_rid = region_id::from_int (i);
6602 if (iter_rid == exclude_rid)
6603 continue;
6604 if (!out->region_p (iter_rid))
6605 {
6606 region_id parent_rid = r->get_parent ();
6607 if (!parent_rid.null_p ())
6608 if (out->region_p (parent_rid))
6609 {
6610 out->add_region (iter_rid);
6611 changed = true;
6612 }
6613 }
6614 }
6615 }
6616 }
6617
6618 /* Delete RID and all descendent regions.
6619 Find any pointers to such regions; convert them to
6620 poisoned values of kind PKIND.
6621 Accumulate stats on purged entities into STATS. */
6622
6623 void
6624 region_model::delete_region_and_descendents (region_id rid,
6625 enum poison_kind pkind,
6626 purge_stats *stats,
6627 logger *logger)
6628 {
6629 /* Find all child and descendent regions. */
6630 region_id_set descendents (this);
6631 get_descendents (rid, &descendents, region_id::null ());
6632
6633 /* Find any pointers to such regions; convert to poisoned. */
6634 poison_any_pointers_to_bad_regions (descendents, pkind);
6635
6636 /* Delete all such regions. */
6637 purge_regions (descendents, stats, logger);
6638 }
6639
6640 /* Find any pointers to regions within BAD_REGIONS; convert them to
6641 poisoned values of kind PKIND. */
6642
6643 void
6644 region_model::poison_any_pointers_to_bad_regions (const region_id_set &
6645 bad_regions,
6646 enum poison_kind pkind)
6647 {
6648 int i;
6649 svalue *sval;
6650 FOR_EACH_VEC_ELT (m_svalues, i, sval)
6651 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
6652 {
6653 region_id ptr_dst = ptr_sval->get_pointee ();
6654 if (!ptr_dst.null_p ())
6655 if (bad_regions.region_p (ptr_dst))
6656 replace_svalue
6657 (svalue_id::from_int (i),
6658 new poisoned_svalue (pkind, sval->get_type ()));
6659 }
6660 }
6661
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL, and populating SID_MAPPING.
   Return true if the merge succeeded, false otherwise.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
				region_model *out_model,
				svalue_id_merger_mapping *sid_mapping) const
{
  /* Both models must share the same root region, with id 0.  */
  gcc_assert (m_root_rid == other_model.m_root_rid);
  gcc_assert (m_root_rid.as_int () == 0);
  gcc_assert (sid_mapping);
  gcc_assert (out_model);

  model_merger merger (this, &other_model, out_model, sid_mapping);

  /* Recursively merge the two region trees, starting at the roots.  */
  if (!root_region::can_merge_p (get_root_region (),
				 other_model.get_root_region (),
				 out_model->get_root_region (),
				 &merger))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			     *other_model.m_constraints,
			     out_model->m_constraints,
			     merger);

  out_model->validate ();

  /* The merged model should be simpler (or as simple) as the inputs.  */
#if 0
  /* NOTE(review): these svalue-count assertions are disabled;
     presumably they did not hold in practice - confirm before
     re-enabling.  */
  gcc_assert (out_model->m_svalues.length () <= m_svalues.length ());
  gcc_assert (out_model->m_svalues.length ()
	      <= other_model.m_svalues.length ());
#endif
  gcc_assert (out_model->m_regions.length () <= m_regions.length ());
  gcc_assert (out_model->m_regions.length ()
	      <= other_model.m_regions.length ());
  // TODO: same, for constraints

  return true;
}
6704
6705 /* As above, but supply a placeholder svalue_id_merger_mapping
6706 instance to be used and receive output. For use in selftests. */
6707
6708 bool
6709 region_model::can_merge_with_p (const region_model &other_model,
6710 region_model *out_model) const
6711 {
6712 svalue_id_merger_mapping sid_mapping (*this, other_model);
6713 return can_merge_with_p (other_model, out_model, &sid_mapping);
6714 }
6715
6716 /* For debugging purposes: look for a region within this region_model
6717 for a decl named NAME (or an SSA_NAME for such a decl),
6718 returning its value, or svalue_id::null if none are found. */
6719
6720 svalue_id
6721 region_model::get_value_by_name (const char *name) const
6722 {
6723 gcc_assert (name);
6724 tree identifier = get_identifier (name);
6725 return get_root_region ()->get_value_by_name (identifier, *this);
6726 }
6727
/* Generate or reuse an svalue_id within this model for an index
   into an array of type PTR_TYPE, based on OFFSET_SID, by dividing
   the byte offset by the size of the element type.  */

svalue_id
region_model::convert_byte_offset_to_array_index (tree ptr_type,
						  svalue_id offset_sid)
{
  gcc_assert (POINTER_TYPE_P (ptr_type));

  if (tree offset_cst = maybe_get_constant (offset_sid))
    {
      tree elem_type = TREE_TYPE (ptr_type);

      /* Arithmetic on void-pointers is a GNU C extension, treating the size
	 of a void as 1.
	 https://gcc.gnu.org/onlinedocs/gcc/Pointer-Arith.html
	 Hence the byte offset already equals the "index".  */
      if (TREE_CODE (elem_type) == VOID_TYPE)
	return offset_sid;

      /* First, use int_size_in_bytes, to reject the case where we have an
	 incomplete type, or a non-constant value.  */
      HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
      if (hwi_byte_size > 0)
	{
	  /* Now call size_in_bytes to get the answer in tree form.  */
	  tree byte_size = size_in_bytes (elem_type);
	  gcc_assert (byte_size);
	  /* Try to get a constant by dividing, ensuring that we're in a
	     signed representation first.  */
	  tree index
	    = fold_binary (TRUNC_DIV_EXPR, ssizetype,
			   fold_convert (ssizetype, offset_cst),
			   fold_convert (ssizetype, byte_size));
	  /* fold_binary can fail, or produce a non-constant result;
	     only use it if we got an INTEGER_CST.  */
	  if (index && TREE_CODE (index) == INTEGER_CST)
	    return get_or_create_constant_svalue (index);
	}
    }

  /* Otherwise, we don't know the array index; generate a new unknown value.
     TODO: do we need to capture the relationship between two unknown
     values (the offset and the index)?  */
  return add_svalue (new unknown_svalue (integer_type_node));
}
6771
6772 /* Get a region of type TYPE for PTR_SID[OFFSET_SID/sizeof (*PTR_SID)].
6773
6774 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6775 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6776 and then get a view of type TYPE on the relevant array element. */
6777
6778 region_id
6779 region_model::get_or_create_mem_ref (tree type,
6780 svalue_id ptr_sid,
6781 svalue_id offset_sid,
6782 region_model_context *ctxt)
6783 {
6784 svalue *ptr_sval = get_svalue (ptr_sid);
6785 tree ptr_type = ptr_sval->get_type ();
6786 gcc_assert (ptr_type);
6787
6788 region_id raw_rid = deref_rvalue (ptr_sid, ctxt);
6789
6790 svalue *offset_sval = get_svalue (offset_sid);
6791 tree offset_type = offset_sval->get_type ();
6792 gcc_assert (offset_type);
6793
6794 if (constant_svalue *cst_sval = offset_sval->dyn_cast_constant_svalue ())
6795 {
6796 if (zerop (cst_sval->get_constant ()))
6797 {
6798 /* Handle the zero offset case. */
6799 return get_or_create_view (raw_rid, type, ctxt);
6800 }
6801
6802 /* If we're already within an array of the correct type,
6803 then we want to reuse that array, rather than starting
6804 a new view.
6805 If so, figure out our raw_rid's offset from its parent,
6806 if we can, and use that to offset OFFSET_SID, and create
6807 the element within the parent region. */
6808 region *raw_reg = get_region (raw_rid);
6809 region_id parent_rid = raw_reg->get_parent ();
6810 tree parent_type = get_region (parent_rid)->get_type ();
6811 if (parent_type
6812 && TREE_CODE (parent_type) == ARRAY_TYPE)
6813 {
6814 // TODO: check we have the correct parent type
6815 array_region *parent_array = get_region <array_region> (parent_rid);
6816 array_region::key_t key_for_raw_rid;
6817 if (parent_array->get_key_for_child_region (raw_rid,
6818 &key_for_raw_rid))
6819 {
6820 /* Convert from offset to index. */
6821 svalue_id index_sid
6822 = convert_byte_offset_to_array_index (ptr_type, offset_sid);
6823 if (tree index_cst
6824 = get_svalue (index_sid)->maybe_get_constant ())
6825 {
6826 array_region::key_t index_offset
6827 = array_region::key_from_constant (index_cst);
6828 array_region::key_t index_rel_to_parent
6829 = key_for_raw_rid + index_offset;
6830 tree index_rel_to_parent_cst
6831 = wide_int_to_tree (integer_type_node,
6832 index_rel_to_parent);
6833 svalue_id index_sid
6834 = get_or_create_constant_svalue (index_rel_to_parent_cst);
6835
6836 /* Carry on, using the parent region and adjusted index. */
6837 region_id element_rid
6838 = parent_array->get_element (this, raw_rid, index_sid,
6839 ctxt);
6840 return get_or_create_view (element_rid, type, ctxt);
6841 }
6842 }
6843 }
6844 }
6845
6846 tree array_type = build_array_type (TREE_TYPE (ptr_type),
6847 integer_type_node);
6848 region_id array_view_rid = get_or_create_view (raw_rid, array_type, ctxt);
6849 array_region *array_reg = get_region <array_region> (array_view_rid);
6850
6851 svalue_id index_sid
6852 = convert_byte_offset_to_array_index (ptr_type, offset_sid);
6853
6854 region_id element_rid
6855 = array_reg->get_element (this, array_view_rid, index_sid, ctxt);
6856
6857 return get_or_create_view (element_rid, type, ctxt);
6858 }
6859
6860 /* Get a region of type TYPE for PTR_SID + OFFSET_SID.
6861
6862 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6863 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6864 and then get a view of type TYPE on the relevant array element. */
6865
6866 region_id
6867 region_model::get_or_create_pointer_plus_expr (tree type,
6868 svalue_id ptr_sid,
6869 svalue_id offset_in_bytes_sid,
6870 region_model_context *ctxt)
6871 {
6872 return get_or_create_mem_ref (type,
6873 ptr_sid,
6874 offset_in_bytes_sid,
6875 ctxt);
6876 }
6877
6878 /* Get or create a view of type TYPE of the region with id RAW_ID.
6879 Return the id of the view (or RAW_ID if it of the same type). */
6880
6881 region_id
6882 region_model::get_or_create_view (region_id raw_rid, tree type,
6883 region_model_context *ctxt)
6884 {
6885 region *raw_region = get_region (raw_rid);
6886
6887 gcc_assert (TYPE_P (type));
6888 if (type != raw_region->get_type ())
6889 {
6890 /* If the region already has a view of the requested type,
6891 reuse it. */
6892 region_id existing_view_rid = raw_region->get_view (type, this);
6893 if (!existing_view_rid.null_p ())
6894 return existing_view_rid;
6895
6896 /* Otherwise, make one (adding it to the region_model and
6897 to the viewed region). */
6898 region_id view_rid = add_region_for_type (raw_rid, type, ctxt);
6899 raw_region->add_view (view_rid, this);
6900 // TODO: something to signify that this is a "view"
6901 return view_rid;
6902 }
6903
6904 return raw_rid;
6905 }
6906
6907 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6908 otherwise. */
6909
6910 tree
6911 region_model::get_fndecl_for_call (const gcall *call,
6912 region_model_context *ctxt)
6913 {
6914 tree fn_ptr = gimple_call_fn (call);
6915 if (fn_ptr == NULL_TREE)
6916 return NULL_TREE;
6917 svalue_id fn_ptr_sid = get_rvalue (fn_ptr, ctxt);
6918 svalue *fn_ptr_sval = get_svalue (fn_ptr_sid);
6919 if (region_svalue *fn_ptr_ptr = fn_ptr_sval->dyn_cast_region_svalue ())
6920 {
6921 region_id fn_rid = fn_ptr_ptr->get_pointee ();
6922 code_region *code = get_root_region ()->get_code_region (this);
6923 if (code)
6924 {
6925 tree fn_decl = code->get_tree_for_child_region (fn_rid);
6926 if (!fn_decl)
6927 return NULL_TREE;
6928 cgraph_node *node = cgraph_node::get (fn_decl);
6929 if (!node)
6930 return NULL_TREE;
6931 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
6932 if (ultimate_node)
6933 return ultimate_node->decl;
6934 }
6935 }
6936
6937 return NULL_TREE;
6938 }
6939
6940 /* struct model_merger. */
6941
/* Dump a multiline representation of this merger to PP:
   both input models, the merged model, and the region and svalue
   mappings from each input model to the merged model.  */

void
model_merger::dump_to_pp (pretty_printer *pp) const
{
  /* The "false" argument requests full (non-summarized) dumps.  */
  pp_string (pp, "model A:");
  pp_newline (pp);
  m_model_a->dump_to_pp (pp, false);
  pp_newline (pp);

  pp_string (pp, "model B:");
  pp_newline (pp);
  m_model_b->dump_to_pp (pp, false);
  pp_newline (pp);

  pp_string (pp, "merged model:");
  pp_newline (pp);
  m_merged_model->dump_to_pp (pp, false);
  pp_newline (pp);

  pp_string (pp, "region map: model A to merged model:");
  pp_newline (pp);
  m_map_regions_from_a_to_m.dump_to_pp (pp);
  pp_newline (pp);

  pp_string (pp, "region map: model B to merged model:");
  pp_newline (pp);
  m_map_regions_from_b_to_m.dump_to_pp (pp);
  pp_newline (pp);

  /* The svalue maps dump their own headers.  */
  m_sid_mapping->dump_to_pp (pp);
}
6974
/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Match the colorization setting of regular diagnostics.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  /* Direct the pretty_printer's output to FP.  */
  pp.buffer->stream = fp;
  dump_to_pp (&pp);
  pp_flush (&pp);
}
6987
/* Dump a multiline representation of this merger to stderr.
   For use from the debugger.  */

DEBUG_FUNCTION void
model_merger::dump () const
{
  dump (stderr);
}
6995
/* Attempt to merge the svalues of SID_A and SID_B (from their
   respective models), writing the id of the resulting svalue
   into *MERGED_SID.
   Return true if the merger is possible, false otherwise.  */

bool
model_merger::can_merge_values_p (svalue_id sid_a,
				  svalue_id sid_b,
				  svalue_id *merged_sid)
{
  gcc_assert (merged_sid);
  svalue *sval_a = m_model_a->get_svalue (sid_a);
  svalue *sval_b = m_model_b->get_svalue (sid_b);

  /* If both are NULL, then the "values" are trivially mergeable.  */
  if (!sval_a && !sval_b)
    return true;

  /* If one is NULL and the other non-NULL, then the "values"
     are not mergeable.  */
  if (!(sval_a && sval_b))
    return false;

  /* Have they both already been mapped to the same new svalue_id?
     If so, use it.  */
  svalue_id sid_a_in_m
    = m_sid_mapping->m_map_from_a_to_m.get_dst_for_src (sid_a);
  svalue_id sid_b_in_m
    = m_sid_mapping->m_map_from_b_to_m.get_dst_for_src (sid_b);
  if (!sid_a_in_m.null_p ()
      && !sid_b_in_m.null_p ()
      && sid_a_in_m == sid_b_in_m)
    {
      *merged_sid = sid_a_in_m;
      return true;
    }

  /* Prefer A's type for the merged value; fall back to B's type.  */
  tree type = sval_a->get_type ();
  if (type == NULL_TREE)
    type = sval_b->get_type ();

  /* If the values have different kinds, or are both unknown,
     then merge as "unknown".  */
  if (sval_a->get_kind () != sval_b->get_kind ()
      || sval_a->get_kind () == SK_UNKNOWN)
    {
      svalue *merged_sval = new unknown_svalue (type);
      *merged_sid = m_merged_model->add_svalue (merged_sval);
      record_svalues (sid_a, sid_b, *merged_sid);
      return true;
    }

  /* Otherwise the two values have the same (non-unknown) kind.  */
  gcc_assert (sval_a->get_kind () == sval_b->get_kind ());

  switch (sval_a->get_kind ())
    {
    default:
    case SK_UNKNOWN: /* SK_UNKNOWN handled above.  */
      gcc_unreachable ();

    case SK_REGION:
      {
	/* If we have two region pointers, then we can merge (possibly to
	   "unknown").  */
	const region_svalue &region_sval_a = *as_a <region_svalue *> (sval_a);
	const region_svalue &region_sval_b = *as_a <region_svalue *> (sval_b);
	region_svalue::merge_values (region_sval_a, region_sval_b,
				     merged_sid, type,
				     this);
	record_svalues (sid_a, sid_b, *merged_sid);
	return true;
      }
      break;
    case SK_CONSTANT:
      {
	/* If we have two constants, then we can merge.  */
	const constant_svalue &cst_sval_a = *as_a <constant_svalue *> (sval_a);
	const constant_svalue &cst_sval_b = *as_a <constant_svalue *> (sval_b);
	constant_svalue::merge_values (cst_sval_a, cst_sval_b,
				       merged_sid, this);
	record_svalues (sid_a, sid_b, *merged_sid);
	return true;
      }
      break;

    case SK_POISONED:
    case SK_SETJMP:
      /* Poisoned and setjmp values are never mergeable.  */
      return false;
    }
}
7086
/* Record that A_RID in model A and B_RID in model B
   correspond to MERGED_RID in the merged model, so
   that pointers can be accurately merged.
   Both per-input-model maps are updated.  */

void
model_merger::record_regions (region_id a_rid,
			      region_id b_rid,
			      region_id merged_rid)
{
  m_map_regions_from_a_to_m.put (a_rid, merged_rid);
  m_map_regions_from_b_to_m.put (b_rid, merged_rid);
}
7099
/* Record that A_SID in model A and B_SID in model B
   correspond to MERGED_SID in the merged model.
   Both per-input-model maps are updated.  */

void
model_merger::record_svalues (svalue_id a_sid,
			      svalue_id b_sid,
			      svalue_id merged_sid)
{
  gcc_assert (m_sid_mapping);
  m_sid_mapping->m_map_from_a_to_m.put (a_sid, merged_sid);
  m_sid_mapping->m_map_from_b_to_m.put (b_sid, merged_sid);
}
7112
/* struct svalue_id_merger_mapping. */

/* svalue_id_merger_mapping's ctor.
   Pre-size each map based on the number of svalues in the
   corresponding input model.  */

svalue_id_merger_mapping::svalue_id_merger_mapping (const region_model &a,
						    const region_model &b)
: m_map_from_a_to_m (a.get_num_svalues ()),
  m_map_from_b_to_m (b.get_num_svalues ())
{
}
7123
/* Dump a multiline representation of this to PP:
   the svalue_id maps from each input model to the merged model.  */

void
svalue_id_merger_mapping::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "svalue_id map: model A to merged model:");
  pp_newline (pp);
  m_map_from_a_to_m.dump_to_pp (pp);
  pp_newline (pp);

  pp_string (pp, "svalue_id map: model B to merged model:");
  pp_newline (pp);
  m_map_from_b_to_m.dump_to_pp (pp);
  pp_newline (pp);
}
7139
/* Dump a multiline representation of this to FILE.  */

void
svalue_id_merger_mapping::dump (FILE *fp) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Match the colorization setting of regular diagnostics.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  /* Direct the pretty_printer's output to FP.  */
  pp.buffer->stream = fp;
  dump_to_pp (&pp);
  pp_flush (&pp);
}
7152
/* Dump a multiline representation of this to stderr.
   For use from the debugger.  */

DEBUG_FUNCTION void
svalue_id_merger_mapping::dump () const
{
  dump (stderr);
}
7160
/* struct canonicalization. */

/* canonicalization's ctor.
   Pre-size the region and svalue maps from MODEL, and start the
   canonical region/svalue ID counters at zero.  */

canonicalization::canonicalization (const region_model &model)
: m_model (model),
  m_rid_map (model.get_num_regions ()),
  m_sid_map (model.get_num_svalues ()),
  m_next_rid_int (0),
  m_next_sid_int (0)
{
}
7173
7174 /* If we've not seen RID yet, assign it a canonicalized region_id,
7175 and walk the region's svalue and then the region. */
7176
7177 void
7178 canonicalization::walk_rid (region_id rid)
7179 {
7180 /* Stop if we've already seen RID. */
7181 if (!m_rid_map.get_dst_for_src (rid).null_p ())
7182 return;
7183
7184 region *region = m_model.get_region (rid);
7185 if (region)
7186 {
7187 m_rid_map.put (rid, region_id::from_int (m_next_rid_int++));
7188 walk_sid (region->get_value_direct ());
7189 region->walk_for_canonicalization (this);
7190 }
7191 }
7192
7193 /* If we've not seen SID yet, assign it a canonicalized svalue_id,
7194 and walk the svalue (and potentially regions e.g. for ptr values). */
7195
7196 void
7197 canonicalization::walk_sid (svalue_id sid)
7198 {
7199 /* Stop if we've already seen SID. */
7200 if (!m_sid_map.get_dst_for_src (sid).null_p ())
7201 return;
7202
7203 svalue *sval = m_model.get_svalue (sid);
7204 if (sval)
7205 {
7206 m_sid_map.put (sid, svalue_id::from_int (m_next_sid_int++));
7207 /* Potentially walk regions e.g. for ptrs. */
7208 sval->walk_for_canonicalization (this);
7209 }
7210 }
7211
/* Dump a multiline representation of this to PP:
   the mappings from the model's region and svalue IDs to their
   canonicalized counterparts.  */

void
canonicalization::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "region_id map:");
  pp_newline (pp);
  m_rid_map.dump_to_pp (pp);
  pp_newline (pp);

  pp_string (pp, "svalue_id map:");
  pp_newline (pp);
  m_sid_map.dump_to_pp (pp);
  pp_newline (pp);
}
7227
/* Dump a multiline representation of this to FILE.  */

void
canonicalization::dump (FILE *fp) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Match the colorization setting of regular diagnostics.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  /* Direct the pretty_printer's output to FP.  */
  pp.buffer->stream = fp;
  dump_to_pp (&pp);
  pp_flush (&pp);
}
7240
/* Dump a multiline representation of this to stderr.
   For use from the debugger.  */

DEBUG_FUNCTION void
canonicalization::dump () const
{
  dump (stderr);
}
7248
7249 } // namespace ana
7250
/* Update HSTATE with a hash of SID.  */

void
inchash::add (svalue_id sid, inchash::hash &hstate)
{
  /* Hash the ID's underlying integer value.  */
  hstate.add_int (sid.as_int ());
}
7258
/* Update HSTATE with a hash of RID.  */

void
inchash::add (region_id rid, inchash::hash &hstate)
{
  /* Hash the ID's underlying integer value.  */
  hstate.add_int (rid.as_int ());
}
7266
/* Dump RMODEL fully to stderr (i.e. without summarization).
   For use from the debugger.  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false); /* "false" requests the non-summarized dump.  */
}
7274
7275 namespace ana {
7276
7277 #if CHECKING_P
7278
7279 namespace selftest {
7280
7281 /* Build a constant tree of the given type from STR. */
7282
7283 static tree
7284 build_real_cst_from_string (tree type, const char *str)
7285 {
7286 REAL_VALUE_TYPE real;
7287 real_from_string (&real, str);
7288 return build_real (type, real);
7289 }
7290
7291 /* Append various "interesting" constants to OUT (e.g. NaN). */
7292
7293 static void
7294 append_interesting_constants (auto_vec<tree> *out)
7295 {
7296 out->safe_push (build_int_cst (integer_type_node, 0));
7297 out->safe_push (build_int_cst (integer_type_node, 42));
7298 out->safe_push (build_int_cst (unsigned_type_node, 0));
7299 out->safe_push (build_int_cst (unsigned_type_node, 42));
7300 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
7301 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
7302 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
7303 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
7304 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
7305 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
7306 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
7307 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
7308 }
7309
7310 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
7311 if the underlying constants aren't comparable. */
7312
7313 static void
7314 test_tree_cmp_on_constants ()
7315 {
7316 auto_vec<tree> csts;
7317 append_interesting_constants (&csts);
7318
7319 /* Try sorting every triple. */
7320 const unsigned num = csts.length ();
7321 for (unsigned i = 0; i < num; i++)
7322 for (unsigned j = 0; j < num; j++)
7323 for (unsigned k = 0; k < num; k++)
7324 {
7325 auto_vec<tree> v (3);
7326 v.quick_push (csts[i]);
7327 v.quick_push (csts[j]);
7328 v.quick_push (csts[k]);
7329 v.qsort (tree_cmp);
7330 }
7331 }
7332
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Evaluate "LHS OP RHS" within MODEL and assert (at LOC) that the
   resulting tristate equals EXPECTED.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  tree lhs, tree_code op, tree rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}
7344
/* Implementation detail of ASSERT_DUMP_TREE_EQ.
   Assert (at LOC) that dumping T produces exactly the string
   EXPECTED.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* NOTE(review): auto_fix_quotes presumably normalizes quote
     characters for the comparison - confirm against its definition.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
7356
/* Assert that dump_tree (T) is EXPECTED.
   Implemented via assert_dump_tree_eq, passing the caller's
   location.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
7363
/* Implementation detail of ASSERT_DUMP_EQ.
   Assert (at LOC) that dumping MODEL (summarized iff SUMMARIZE)
   produces exactly the string EXPECTED.  */

static void
assert_dump_eq (const location &loc,
		const region_model &model,
		bool summarize,
		const char *expected)
{
  /* NOTE(review): auto_fix_quotes presumably normalizes quote
     characters for the comparison - confirm against its definition.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  model.dump_to_pp (&pp, summarize);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
7378
/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.
   Implemented via assert_dump_eq, passing the caller's location.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT
7385
/* Smoketest for region_model::dump_to_pp.  */

static void
test_dump ()
{
  region_model model;
  model.get_root_region ()->ensure_stack_region (&model);
  model.get_root_region ()->ensure_globals_region (&model);
  model.get_root_region ()->ensure_heap_region (&model);

  /* Full dump of a model containing just the special regions.  */
  ASSERT_DUMP_EQ (model, false,
		  "r0: {kind: `root', parent: null, sval: null}\n"
		  "|-stack: r1: {kind: `stack', parent: r0, sval: sv0}\n"
		  "| |: sval: sv0: {poisoned: uninit}\n"
		  "|-globals: r2: {kind: `globals', parent: r0, sval: null, map: {}}\n"
		  "`-heap: r3: {kind: `heap', parent: r0, sval: sv1}\n"
		  " |: sval: sv1: {poisoned: uninit}\n"
		  "svalues:\n"
		  " sv0: {poisoned: uninit}\n"
		  " sv1: {poisoned: uninit}\n"
		  "constraint manager:\n"
		  " equiv classes:\n"
		  " constraints:\n");
  /* The summarized dump of a model with no user-visible state is
     empty.  */
  ASSERT_DUMP_EQ (model, true, "");
}
7411
/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      /* Prepend each field to the chain; the chain is reversed
	 below to restore the original order.  */
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  /* Compute layout information for the new type.  */
  layout_type (t);
  return t;
}
7440
/* Verify that dumps can show struct fields.  */

static void
test_dump_2 ()
{
  /* Build "struct coord { int x; int y; };".  */
  auto_vec<tree> fields;
  tree x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			     get_identifier ("x"), integer_type_node);
  fields.safe_push (x_field);
  tree y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			     get_identifier ("y"), integer_type_node);
  fields.safe_push (y_field);
  tree coord_type = make_test_compound_type ("coord", true, &fields);

  /* Build a global "struct coord c;" and the exprs "c.x" and "c.y".  */
  tree c = build_global_decl ("c", coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (x_field),
		     c, x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (y_field),
		     c, y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model model;
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Simplified dump. */
  ASSERT_DUMP_EQ (model, true, "c.x: 17, c.y: -3");

  /* Full dump. */
  ASSERT_DUMP_EQ
    (model, false,
     "r0: {kind: `root', parent: null, sval: null}\n"
     "`-globals: r1: {kind: `globals', parent: r0, sval: null, map: {`c': r2}}\n"
     " `-`c': r2: {kind: `struct', parent: r1, sval: null, type: `struct coord', map: {`x': r3, `y': r4}}\n"
     " |: type: `struct coord'\n"
     " |-`x': r3: {kind: `primitive', parent: r2, sval: sv0, type: `int'}\n"
     " | |: sval: sv0: {type: `int', `17'}\n"
     " | |: type: `int'\n"
     " `-`y': r4: {kind: `primitive', parent: r2, sval: sv1, type: `int'}\n"
     " |: sval: sv1: {type: `int', `-3'}\n"
     " |: type: `int'\n"
     "svalues:\n"
     " sv0: {type: `int', `17'}\n"
     " sv1: {type: `int', `-3'}\n"
     "constraint manager:\n"
     " equiv classes:\n"
     " constraints:\n");
}
7491
/* Verify that dumps can show array elements.  */

static void
test_dump_3 ()
{
  /* Build a global "char a[11];" (index type 0..10).  */
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model model;
  /* Set a[0] to 'A' (ASCII 65).  */
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
		     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);

  /* Simplified dump. */
  ASSERT_DUMP_EQ (model, true, "a[0]: 65");

  /* Full dump. */
  ASSERT_DUMP_EQ
    (model, false,
     "r0: {kind: `root', parent: null, sval: null}\n"
     "`-globals: r1: {kind: `globals', parent: r0, sval: null, map: {`a': r2}}\n"
     " `-`a': r2: {kind: `array', parent: r1, sval: null, type: `char[11]', array: {[0]: r3}}\n"
     " |: type: `char[11]'\n"
     " `-[0]: r3: {kind: `primitive', parent: r2, sval: sv1, type: `char'}\n"
     " |: sval: sv1: {type: `char', `65'}\n"
     " |: type: `char'\n"
     "svalues:\n"
     " sv0: {type: `int', `0'}\n"
     " sv1: {type: `char', `65'}\n"
     "constraint manager:\n"
     " equiv classes:\n"
     " constraints:\n");
}
7529
7530 /* Verify that region_model::get_representative_tree works as expected. */
7531
7532 static void
7533 test_get_representative_tree ()
7534 {
7535 /* STRING_CST. */
7536 {
7537 tree string_cst = build_string (4, "foo");
7538 region_model m;
7539 svalue_id str_sid = m.get_rvalue (string_cst, NULL);
7540 tree rep = m.get_representative_tree (str_sid);
7541 ASSERT_EQ (rep, string_cst);
7542 }
7543
7544 /* String literal. */
7545 {
7546 tree string_cst_ptr = build_string_literal (4, "foo");
7547 region_model m;
7548 svalue_id str_sid = m.get_rvalue (string_cst_ptr, NULL);
7549 tree rep = m.get_representative_tree (str_sid);
7550 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
7551 }
7552 }
7553
7554 /* Verify that calling region_model::get_rvalue repeatedly on the same
7555 tree constant retrieves the same svalue_id. */
7556
7557 static void
7558 test_unique_constants ()
7559 {
7560 tree int_0 = build_int_cst (integer_type_node, 0);
7561 tree int_42 = build_int_cst (integer_type_node, 42);
7562
7563 test_region_model_context ctxt;
7564 region_model model;
7565 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
7566 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
7567 model.get_rvalue (int_42, &ctxt));
7568 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
7569 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
7570 }
7571
7572 /* Check that operator== and hashing works as expected for the
7573 various svalue subclasses. */
7574
7575 static void
7576 test_svalue_equality ()
7577 {
7578 tree int_42 = build_int_cst (integer_type_node, 42);
7579 tree int_0 = build_int_cst (integer_type_node, 0);
7580
7581 /* Create pairs instances of the various subclasses of svalue,
7582 testing for hash and equality between (this, this) and
7583 (this, other of same subclass). */
7584 svalue *ptr_to_r0
7585 = new region_svalue (ptr_type_node, region_id::from_int (0));
7586 svalue *ptr_to_r1
7587 = new region_svalue (ptr_type_node, region_id::from_int (1));
7588
7589 ASSERT_EQ (ptr_to_r0->hash (), ptr_to_r0->hash ());
7590 ASSERT_EQ (*ptr_to_r0, *ptr_to_r0);
7591
7592 ASSERT_NE (ptr_to_r0->hash (), ptr_to_r1->hash ());
7593 ASSERT_NE (*ptr_to_r0, *ptr_to_r1);
7594
7595 svalue *cst_int_42 = new constant_svalue (int_42);
7596 svalue *cst_int_0 = new constant_svalue (int_0);
7597
7598 ASSERT_EQ (cst_int_42->hash (), cst_int_42->hash ());
7599 ASSERT_EQ (*cst_int_42, *cst_int_42);
7600
7601 ASSERT_NE (cst_int_42->hash (), cst_int_0->hash ());
7602 ASSERT_NE (*cst_int_42, *cst_int_0);
7603
7604 svalue *uninit = new poisoned_svalue (POISON_KIND_UNINIT, NULL_TREE);
7605 svalue *freed = new poisoned_svalue (POISON_KIND_FREED, NULL_TREE);
7606
7607 ASSERT_EQ (uninit->hash (), uninit->hash ());
7608 ASSERT_EQ (*uninit, *uninit);
7609
7610 ASSERT_NE (uninit->hash (), freed->hash ());
7611 ASSERT_NE (*uninit, *freed);
7612
7613 svalue *unknown_0 = new unknown_svalue (ptr_type_node);
7614 svalue *unknown_1 = new unknown_svalue (ptr_type_node);
7615 ASSERT_EQ (unknown_0->hash (), unknown_0->hash ());
7616 ASSERT_EQ (*unknown_0, *unknown_0);
7617 ASSERT_EQ (*unknown_1, *unknown_1);
7618
7619 /* Comparisons between different kinds of svalue. */
7620 ASSERT_NE (*ptr_to_r0, *cst_int_42);
7621 ASSERT_NE (*ptr_to_r0, *uninit);
7622 ASSERT_NE (*ptr_to_r0, *unknown_0);
7623 ASSERT_NE (*cst_int_42, *ptr_to_r0);
7624 ASSERT_NE (*cst_int_42, *uninit);
7625 ASSERT_NE (*cst_int_42, *unknown_0);
7626 ASSERT_NE (*uninit, *ptr_to_r0);
7627 ASSERT_NE (*uninit, *cst_int_42);
7628 ASSERT_NE (*uninit, *unknown_0);
7629 ASSERT_NE (*unknown_0, *ptr_to_r0);
7630 ASSERT_NE (*unknown_0, *cst_int_42);
7631 ASSERT_NE (*unknown_0, *uninit);
7632
7633 delete ptr_to_r0;
7634 delete ptr_to_r1;
7635 delete cst_int_42;
7636 delete cst_int_0;
7637 delete uninit;
7638 delete freed;
7639 delete unknown_0;
7640 delete unknown_1;
7641 }
7642
7643 /* Check that operator== and hashing works as expected for the
7644 various region subclasses. */
7645
7646 static void
7647 test_region_equality ()
7648 {
7649 region *r0
7650 = new primitive_region (region_id::from_int (3), integer_type_node);
7651 region *r1
7652 = new primitive_region (region_id::from_int (4), integer_type_node);
7653
7654 ASSERT_EQ (*r0, *r0);
7655 ASSERT_EQ (r0->hash (), r0->hash ());
7656 ASSERT_NE (*r0, *r1);
7657 ASSERT_NE (r0->hash (), r1->hash ());
7658
7659 delete r0;
7660 delete r1;
7661
7662 // TODO: test coverage for the map within a map_region
7663 }
7664
7665 /* A subclass of purge_criteria for selftests: purge all svalue_id instances. */
7666
class purge_all_svalue_ids : public purge_criteria
{
public:
  /* Purge unconditionally: every svalue_id is treated as a victim.  */
  bool should_purge_p (svalue_id) const FINAL OVERRIDE
  {
    return true;
  }
};
7675
7676 /* A subclass of purge_criteria: purge a specific svalue_id. */
7677
class purge_one_svalue_id : public purge_criteria
{
public:
  /* Purge exactly VICTIM.  */
  purge_one_svalue_id (svalue_id victim) : m_victim (victim) {}

  /* Convenience ctor: purge the svalue_id that EXPR has within MODEL.
     NOTE(review): MODEL is taken by value, copying the whole model.
     get_rvalue is called on the copy, so any svalues it creates don't
     affect the caller's model -- presumably intentional, but confirm
     this isn't an accidental pass-by-value of a heavyweight object.  */
  purge_one_svalue_id (region_model model, tree expr)
  : m_victim (model.get_rvalue (expr, NULL)) {}

  bool should_purge_p (svalue_id sid) const FINAL OVERRIDE
  {
    return sid == m_victim;
  }

private:
  /* The single svalue_id to be purged.  */
  svalue_id m_victim;
};
7694
7695 /* Check that constraint_manager::purge works for individual svalue_ids. */
7696
static void
test_purging_by_criteria ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_0 = build_int_cst (integer_type_node, 0);

  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* Scenario: "x == y".  Purging x, then y, should get us back to an
     empty constraint state.  */
  {
    region_model model0;
    region_model model1;

    ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, y);
    ASSERT_NE (model0, model1);

    /* Purge x: x leaves the equivalence class, but the class itself
       survives (y is still in it), hence 0 classes purged.  */
    purge_stats stats_for_px;
    purge_one_svalue_id px (model1, x);
    model1.get_constraints ()->purge (px, &stats_for_px);
    ASSERT_EQ (stats_for_px.m_num_equiv_classes, 0);

    /* Purge y: the now-singleton class goes away.  */
    purge_stats stats_for_py;
    purge_one_svalue_id py (model1.get_rvalue (y, NULL));
    model1.get_constraints ()->purge (py, &stats_for_py);
    ASSERT_EQ (stats_for_py.m_num_equiv_classes, 1);

    /* The constraints should now match those of the pristine model.  */
    ASSERT_EQ (*model0.get_constraints (), *model1.get_constraints ());
  }

  /* Scenario: "x == 42".  Purging x should forget the condition.  */
  {
    region_model model0;
    region_model model1;

    ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, int_42);
    ASSERT_NE (model0, model1);
    ASSERT_CONDITION_TRUE (model1, x, EQ_EXPR, int_42);

    purge_stats stats;
    model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);

    ASSERT_CONDITION_UNKNOWN (model1, x, EQ_EXPR, int_42);
  }

  /* Scenario: "0 <= x <= 42".  Purging x should drop both bounds.  */
  {
    region_model model0;
    region_model model1;

    ADD_SAT_CONSTRAINT (model1, x, GE_EXPR, int_0);
    ADD_SAT_CONSTRAINT (model1, x, LE_EXPR, int_42);
    ASSERT_NE (model0, model1);

    ASSERT_CONDITION_TRUE (model1, x, GE_EXPR, int_0);
    ASSERT_CONDITION_TRUE (model1, x, LE_EXPR, int_42);

    purge_stats stats;
    model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);

    ASSERT_CONDITION_UNKNOWN (model1, x, GE_EXPR, int_0);
    ASSERT_CONDITION_UNKNOWN (model1, x, LE_EXPR, int_42);
  }

  /* Scenario: "x != 42 && y != 0".  Purging x should not disturb the
     independent constraint on y.  */
  {
    region_model model0;
    region_model model1;

    ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42);
    ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0);
    ASSERT_NE (model0, model1);
    ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42);
    ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);

    purge_stats stats;
    model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);
    ASSERT_NE (model0, model1);

    ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42);
    ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);
  }

  /* Scenario: same setup, but purging everything should forget both
     conditions.  */
  {
    region_model model0;
    region_model model1;

    ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42);
    ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0);
    ASSERT_NE (model0, model1);
    ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42);
    ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);

    purge_stats stats;
    model1.get_constraints ()->purge (purge_all_svalue_ids (), &stats);
    ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42);
    ASSERT_CONDITION_UNKNOWN (model1, y, NE_EXPR, int_0);
  }

}
7793
7794 /* Test that region_model::purge_unused_svalues works as expected. */
7795
static void
test_purge_unused_svalues ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model;
  /* Bind "x" to three successive fresh unknown values; each rebinding
     leaves the previous unknown value unreferenced.  */
  model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
				  &ctxt);
  model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
				  &ctxt);
  model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
				  &ctxt);
  /* Constrain the third (current) unknown value of "x".  */
  model.add_constraint (x, NE_EXPR, int_42, &ctxt);

  /* Overwrite "x" with the constant 42, orphaning the third unknown
     value (and with it the "!= 42" constraint above).  */
  model.set_value (model.get_lvalue (x, &ctxt),
		   model.get_rvalue (int_42, &ctxt),
		   &ctxt);
  model.add_constraint (y, GT_EXPR, int_0, &ctxt);

  /* The redundant unknown values should have been purged.  */
  purge_stats purged;
  model.purge_unused_svalues (&purged, NULL);
  ASSERT_EQ (purged.m_num_svalues, 3);

  /* and the redundant constraint on an old, unknown value for x should
     have been purged.  */
  ASSERT_EQ (purged.m_num_equiv_classes, 1);
  ASSERT_EQ (purged.m_num_constraints, 1);
  ASSERT_EQ (model.get_constraints ()->m_constraints.length (), 2);

  /* ...but we should still have x == 42.  */
  ASSERT_EQ (model.eval_condition (x, EQ_EXPR, int_42, &ctxt),
	     tristate::TS_TRUE);

  /* ...and we should still have the constraint on y.  */
  ASSERT_EQ (model.eval_condition (y, GT_EXPR, int_0, &ctxt),
	     tristate::TS_TRUE);

  /* No diagnostics should have been emitted along the way.  */
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
}
7840
7841 /* Verify that simple assignments work as expected. */
7842
7843 static void
7844 test_assignment ()
7845 {
7846 tree int_0 = build_int_cst (integer_type_node, 0);
7847 tree x = build_global_decl ("x", integer_type_node);
7848 tree y = build_global_decl ("y", integer_type_node);
7849
7850 /* "x == 0", then use of y, then "y = 0;". */
7851 region_model model;
7852 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
7853 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
7854 model.set_value (model.get_lvalue (y, NULL),
7855 model.get_rvalue (int_0, NULL),
7856 NULL);
7857 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
7858 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
7859
7860 ASSERT_DUMP_EQ (model, true, "y: 0, {x}: unknown, x == y");
7861 }
7862
7863 /* Verify the details of pushing and popping stack frames. */
7864
static void
test_stack_frames ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = build_int_cst (integer_type_node, 0);

  /* Create fndecls (and their struct functions) for a caller and a
     callee.  */
  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
				    "parent_fn",
				    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
				   "child_fn",
				   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame.  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("b"),
		       integer_type_node);
  /* "x" and "y" in a child frame.  */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("x"),
		       integer_type_node);
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("y"),
		       integer_type_node);

  /* "p" global.  */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global.  */
  tree q = build_global_decl ("q", ptr_type_node);

  test_region_model_context ctxt;
  region_model model;

  /* Push stack frame for "parent_fn"; set up "a = 42" and the
     constraint "b < 10" on an unknown value of b.  */
  region_id parent_frame_rid
    = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid);
  region_id a_in_parent_rid = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_rid, model.get_rvalue (int_42, &ctxt), &ctxt);
  model.set_to_new_unknown_value (model.get_lvalue (b, &ctxt),
				  integer_type_node, &ctxt);
  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn"; set up "x = 0" and the
     constraint "y != 5" on an unknown value of y.  */
  region_id child_frame_rid
    = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame_id (), child_frame_rid);
  region_id x_in_child_rid = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_rid, model.get_rvalue (int_0, &ctxt), &ctxt);
  model.set_to_new_unknown_value (model.get_lvalue (y, &ctxt),
				  integer_type_node, &ctxt);
  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame:  p = &x.  */
  region_id p_in_globals_rid = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_rid,
		   model.get_or_create_ptr_svalue (ptr_type_node,
						   x_in_child_rid),
		   &ctxt);

  /* Point another global pointer at p:  q = &p.  */
  region_id q_in_globals_rid = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_rid,
		   model.get_or_create_ptr_svalue (ptr_type_node,
						   p_in_globals_rid),
		   &ctxt);

  /* Test get_descendents.  */
  region_id_set descendents (&model);
  model.get_descendents (child_frame_rid, &descendents, region_id::null ());
  ASSERT_TRUE (descendents.region_p (child_frame_rid));
  ASSERT_TRUE (descendents.region_p (x_in_child_rid));
  ASSERT_FALSE (descendents.region_p (a_in_parent_rid));
  /* 3 regions: presumably the child frame itself, plus the regions
     for x and y -- TODO confirm.  */
  ASSERT_EQ (descendents.num_regions (), 3);
#if 0
  auto_vec<region_id> test_vec;
  for (region_id_set::iterator_t iter = descendents.begin ();
       iter != descendents.end ();
       ++iter)
    test_vec.safe_push (*iter);
  gcc_unreachable (); // TODO
  //ASSERT_EQ ();
#endif

  ASSERT_DUMP_EQ (model, true,
		  "a: 42, x: 0, p: &x, q: &p, {b, y}: unknown, b < 10, y != 5");

  /* Pop the "child_fn" frame from the stack.  */
  purge_stats purged;
  model.pop_frame (true, &purged, &ctxt);

  /* We should have purged the unknown values for x and y.  */
  ASSERT_EQ (purged.m_num_svalues, 2);

  /* We should have purged the frame region and the regions for x and y.  */
  ASSERT_EQ (purged.m_num_regions, 3);

  /* We should have purged the constraint on y.  */
  ASSERT_EQ (purged.m_num_equiv_classes, 1);
  ASSERT_EQ (purged.m_num_constraints, 1);

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned.  */
  svalue *new_p_sval = model.get_svalue (model.get_rvalue (p, &ctxt));
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
	     POISON_KIND_POPPED_STACK);

  /* Verify that q still points to p, in spite of the region
     renumbering.  */
  svalue *new_q_sval = model.get_svalue (model.get_rvalue (q, &ctxt));
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->dyn_cast_region_svalue ()->get_pointee (),
	     model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated.  */
  ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid);

  /* Verify locals in parent frame.  */
  /* Verify "a" still has its value.  */
  svalue *new_a_sval = model.get_svalue (model.get_rvalue (a, &ctxt));
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
	     int_42);
  /* Verify "b" still has its constraint.  */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));
}
8007
8008 /* Verify that get_representative_path_var works as expected, that
8009 we can map from region ids to parms and back within a recursive call
8010 stack. */
8011
8012 static void
8013 test_get_representative_path_var ()
8014 {
8015 auto_vec <tree> param_types;
8016 tree fndecl = make_fndecl (integer_type_node,
8017 "factorial",
8018 param_types);
8019 allocate_struct_function (fndecl, true);
8020
8021 /* Parm "n". */
8022 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8023 get_identifier ("n"),
8024 integer_type_node);
8025
8026 region_model model;
8027
8028 /* Push 5 stack frames for "factorial", each with a param */
8029 auto_vec<region_id> parm_rids;
8030 auto_vec<svalue_id> parm_sids;
8031 for (int depth = 0; depth < 5; depth++)
8032 {
8033 region_id frame_rid
8034 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, NULL);
8035 region_id rid_n = model.get_lvalue (path_var (n, depth), NULL);
8036 parm_rids.safe_push (rid_n);
8037
8038 ASSERT_EQ (model.get_region (rid_n)->get_parent (), frame_rid);
8039
8040 svalue_id sid_n
8041 = model.set_to_new_unknown_value (rid_n, integer_type_node, NULL);
8042 parm_sids.safe_push (sid_n);
8043 }
8044
8045 /* Verify that we can recognize that the regions are the parms,
8046 at every depth. */
8047 for (int depth = 0; depth < 5; depth++)
8048 {
8049 ASSERT_EQ (model.get_representative_path_var (parm_rids[depth]),
8050 path_var (n, depth));
8051 /* ...and that we can lookup lvalues for locals for all frames,
8052 not just the top. */
8053 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
8054 parm_rids[depth]);
8055 /* ...and that we can locate the svalues. */
8056 auto_vec<path_var> pvs;
8057 model.get_path_vars_for_svalue (parm_sids[depth], &pvs);
8058 ASSERT_EQ (pvs.length (), 1);
8059 ASSERT_EQ (pvs[0], path_var (n, depth));
8060 }
8061 }
8062
8063 /* Verify that the core regions within a region_model are in a consistent
8064 order after canonicalization. */
8065
8066 static void
8067 test_canonicalization_1 ()
8068 {
8069 region_model model0;
8070 model0.get_root_region ()->ensure_stack_region (&model0);
8071 model0.get_root_region ()->ensure_globals_region (&model0);
8072
8073 region_model model1;
8074 model1.get_root_region ()->ensure_globals_region (&model1);
8075 model1.get_root_region ()->ensure_stack_region (&model1);
8076
8077 model0.canonicalize (NULL);
8078 model1.canonicalize (NULL);
8079 ASSERT_EQ (model0, model1);
8080 }
8081
8082 /* Verify that region models for
8083 x = 42; y = 113;
8084 and
8085 y = 113; x = 42;
8086 are equal after canonicalization. */
8087
8088 static void
8089 test_canonicalization_2 ()
8090 {
8091 tree int_42 = build_int_cst (integer_type_node, 42);
8092 tree int_113 = build_int_cst (integer_type_node, 113);
8093 tree x = build_global_decl ("x", integer_type_node);
8094 tree y = build_global_decl ("y", integer_type_node);
8095
8096 region_model model0;
8097 model0.set_value (model0.get_lvalue (x, NULL),
8098 model0.get_rvalue (int_42, NULL),
8099 NULL);
8100 model0.set_value (model0.get_lvalue (y, NULL),
8101 model0.get_rvalue (int_113, NULL),
8102 NULL);
8103
8104 region_model model1;
8105 model1.set_value (model1.get_lvalue (y, NULL),
8106 model1.get_rvalue (int_113, NULL),
8107 NULL);
8108 model1.set_value (model1.get_lvalue (x, NULL),
8109 model1.get_rvalue (int_42, NULL),
8110 NULL);
8111
8112 model0.canonicalize (NULL);
8113 model1.canonicalize (NULL);
8114 ASSERT_EQ (model0, model1);
8115 }
8116
8117 /* Verify that constraints for
8118 x > 3 && y > 42
8119 and
8120 y > 42 && x > 3
8121 are equal after canonicalization. */
8122
8123 static void
8124 test_canonicalization_3 ()
8125 {
8126 tree int_3 = build_int_cst (integer_type_node, 3);
8127 tree int_42 = build_int_cst (integer_type_node, 42);
8128 tree x = build_global_decl ("x", integer_type_node);
8129 tree y = build_global_decl ("y", integer_type_node);
8130
8131 region_model model0;
8132 model0.add_constraint (x, GT_EXPR, int_3, NULL);
8133 model0.add_constraint (y, GT_EXPR, int_42, NULL);
8134
8135 region_model model1;
8136 model1.add_constraint (y, GT_EXPR, int_42, NULL);
8137 model1.add_constraint (x, GT_EXPR, int_3, NULL);
8138
8139 model0.canonicalize (NULL);
8140 model1.canonicalize (NULL);
8141 ASSERT_EQ (model0, model1);
8142 }
8143
8144 /* Verify that we can canonicalize a model containing NaN and other real
8145 constants. */
8146
8147 static void
8148 test_canonicalization_4 ()
8149 {
8150 auto_vec<tree> csts;
8151 append_interesting_constants (&csts);
8152
8153 region_model model;
8154
8155 unsigned i;
8156 tree cst;
8157 FOR_EACH_VEC_ELT (csts, i, cst)
8158 model.get_rvalue (cst, NULL);
8159
8160 model.canonicalize (NULL);
8161 }
8162
8163 /* Assert that if we have two region_model instances
8164 with values VAL_A and VAL_B for EXPR that they are
8165 mergable. Write the merged model to *OUT_MERGED_MODEL,
8166 and the merged svalue ptr to *OUT_MERGED_SVALUE.
8167 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
8168 for that region_model. */
8169
8170 static void
8171 assert_region_models_merge (tree expr, tree val_a, tree val_b,
8172 region_model *out_merged_model,
8173 svalue **out_merged_svalue)
8174 {
8175 test_region_model_context ctxt;
8176 region_model model0;
8177 region_model model1;
8178 if (val_a)
8179 model0.set_value (model0.get_lvalue (expr, &ctxt),
8180 model0.get_rvalue (val_a, &ctxt),
8181 &ctxt);
8182 if (val_b)
8183 model1.set_value (model1.get_lvalue (expr, &ctxt),
8184 model1.get_rvalue (val_b, &ctxt),
8185 &ctxt);
8186
8187 /* They should be mergeable. */
8188 ASSERT_TRUE (model0.can_merge_with_p (model1, out_merged_model));
8189
8190 svalue_id merged_svalue_sid = out_merged_model->get_rvalue (expr, &ctxt);
8191 *out_merged_svalue = out_merged_model->get_svalue (merged_svalue_sid);
8192 }
8193
8194 /* Verify that we can merge region_model instances. */
8195
8196 static void
8197 test_state_merging ()
8198 {
8199 tree int_42 = build_int_cst (integer_type_node, 42);
8200 tree int_113 = build_int_cst (integer_type_node, 113);
8201 tree x = build_global_decl ("x", integer_type_node);
8202 tree y = build_global_decl ("y", integer_type_node);
8203 tree z = build_global_decl ("z", integer_type_node);
8204 tree p = build_global_decl ("p", ptr_type_node);
8205
8206 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
8207 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
8208
8209 auto_vec <tree> param_types;
8210 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
8211 allocate_struct_function (test_fndecl, true);
8212
8213 /* Param "a". */
8214 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8215 get_identifier ("a"),
8216 integer_type_node);
8217 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
8218
8219 /* Param "q", a pointer. */
8220 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8221 get_identifier ("q"),
8222 ptr_type_node);
8223
8224 {
8225 region_model model0;
8226 region_model model1;
8227 region_model merged;
8228 /* Verify empty models can be merged. */
8229 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8230 ASSERT_EQ (model0, merged);
8231 }
8232
8233 /* Verify that we can merge two contradictory constraints on the
8234 value for a global. */
8235 /* TODO: verify that the merged model doesn't have a value for
8236 the global */
8237 {
8238 region_model model0;
8239 region_model model1;
8240 region_model merged;
8241 test_region_model_context ctxt;
8242 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
8243 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
8244 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8245 ASSERT_NE (model0, merged);
8246 ASSERT_NE (model1, merged);
8247 }
8248
8249 /* Verify handling of a PARM_DECL. */
8250 {
8251 test_region_model_context ctxt;
8252 region_model model0;
8253 region_model model1;
8254 ASSERT_EQ (model0.get_stack_depth (), 0);
8255 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
8256 ASSERT_EQ (model0.get_stack_depth (), 1);
8257 ASSERT_EQ (model0.get_function_at_depth (0),
8258 DECL_STRUCT_FUNCTION (test_fndecl));
8259 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
8260
8261 svalue_id sid_a
8262 = model0.set_to_new_unknown_value (model0.get_lvalue (a, &ctxt),
8263 integer_type_node, &ctxt);
8264 model1.set_to_new_unknown_value (model1.get_lvalue (a, &ctxt),
8265 integer_type_node, &ctxt);
8266 ASSERT_EQ (model0, model1);
8267
8268 /* Check that get_value_by_name works for locals. */
8269 ASSERT_EQ (model0.get_value_by_name ("a"), sid_a);
8270
8271 /* They should be mergeable, and the result should be the same. */
8272 region_model merged;
8273 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8274 ASSERT_EQ (model0, merged);
8275 /* In particular, there should be an unknown value for "a". */
8276 svalue *merged_a_sval = merged.get_svalue (merged.get_rvalue (a, &ctxt));
8277 ASSERT_EQ (merged_a_sval->get_kind (), SK_UNKNOWN);
8278 }
8279
8280 /* Verify handling of a global. */
8281 {
8282 test_region_model_context ctxt;
8283 region_model model0;
8284 region_model model1;
8285 svalue_id sid_x
8286 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
8287 integer_type_node, &ctxt);
8288 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt),
8289 integer_type_node, &ctxt);
8290 ASSERT_EQ (model0, model1);
8291
8292 /* Check that get_value_by_name works for globals. */
8293 ASSERT_EQ (model0.get_value_by_name ("x"), sid_x);
8294
8295 /* They should be mergeable, and the result should be the same. */
8296 region_model merged;
8297 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8298 ASSERT_EQ (model0, merged);
8299 /* In particular, there should be an unknown value for "x". */
8300 svalue *merged_x_sval = merged.get_svalue (merged.get_rvalue (x, &ctxt));
8301 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8302 }
8303
8304 /* Use global-handling to verify various combinations of values. */
8305
8306 /* Two equal constant values. */
8307 {
8308 region_model merged;
8309 svalue *merged_x_sval;
8310 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
8311
8312 /* In particular, there should be a constant value for "x". */
8313 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
8314 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
8315 int_42);
8316 }
8317
8318 /* Two non-equal constant values. */
8319 {
8320 region_model merged;
8321 svalue *merged_x_sval;
8322 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
8323
8324 /* In particular, there should be an unknown value for "x". */
8325 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8326 }
8327
8328 /* Uninit and constant. */
8329 {
8330 region_model merged;
8331 svalue *merged_x_sval;
8332 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
8333
8334 /* In particular, there should be an unknown value for "x". */
8335 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8336 }
8337
8338 /* Constant and uninit. */
8339 {
8340 region_model merged;
8341 svalue *merged_x_sval;
8342 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
8343
8344 /* In particular, there should be an unknown value for "x". */
8345 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8346 }
8347
8348 /* Unknown and constant. */
8349 // TODO
8350
8351 /* Pointers: NULL and NULL. */
8352 // TODO
8353
8354 /* Pointers: NULL and non-NULL. */
8355 // TODO
8356
8357 /* Pointers: non-NULL and non-NULL: ptr to a local. */
8358 {
8359 region_model model0;
8360 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8361 model0.set_to_new_unknown_value (model0.get_lvalue (a, NULL),
8362 integer_type_node, NULL);
8363 model0.set_value (model0.get_lvalue (p, NULL),
8364 model0.get_rvalue (addr_of_a, NULL), NULL);
8365
8366 region_model model1 (model0);
8367 ASSERT_EQ (model0, model1);
8368
8369 /* They should be mergeable, and the result should be the same. */
8370 region_model merged;
8371 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8372 ASSERT_EQ (model0, merged);
8373 }
8374
8375 /* Pointers: non-NULL and non-NULL: ptr to a global. */
8376 {
8377 region_model merged;
8378 /* p == &y in both input models. */
8379 svalue *merged_p_sval;
8380 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
8381 &merged_p_sval);
8382
8383 /* We should get p == &y in the merged model. */
8384 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
8385 region_svalue *merged_p_ptr = merged_p_sval->dyn_cast_region_svalue ();
8386 region_id merged_p_star_rid = merged_p_ptr->get_pointee ();
8387 ASSERT_EQ (merged_p_star_rid, merged.get_lvalue (y, NULL));
8388 }
8389
8390 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
8391 {
8392 region_model merged;
8393 /* x == &y vs x == &z in the input models. */
8394 svalue *merged_x_sval;
8395 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
8396 &merged_x_sval);
8397
8398 /* We should get x == unknown in the merged model. */
8399 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8400 }
8401
8402 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
8403 {
8404 test_region_model_context ctxt;
8405 region_model model0;
8406 region_id new_rid = model0.add_new_malloc_region ();
8407 svalue_id ptr_sid
8408 = model0.get_or_create_ptr_svalue (ptr_type_node, new_rid);
8409 model0.set_value (model0.get_lvalue (p, &ctxt),
8410 ptr_sid, &ctxt);
8411 model0.canonicalize (&ctxt);
8412
8413 region_model model1 (model0);
8414
8415 ASSERT_EQ (model0, model1);
8416
8417 region_model merged;
8418 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8419
8420 merged.canonicalize (&ctxt);
8421
8422 /* The merged model ought to be identical (after canonicalization,
8423 at least). */
8424 ASSERT_EQ (model0, merged);
8425 }
8426
8427 /* Two regions sharing the same unknown svalue should continue sharing
8428 an unknown svalue after self-merger. */
8429 {
8430 test_region_model_context ctxt;
8431 region_model model0;
8432 svalue_id sid
8433 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
8434 integer_type_node, &ctxt);
8435 model0.set_value (model0.get_lvalue (y, &ctxt), sid, &ctxt);
8436 region_model model1 (model0);
8437
8438 /* They should be mergeable, and the result should be the same. */
8439 region_model merged;
8440 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8441 ASSERT_EQ (model0, merged);
8442
8443 /* In particular, we should have x == y. */
8444 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
8445 tristate (tristate::TS_TRUE));
8446 }
8447
8448 #if 0
8449 {
8450 region_model model0;
8451 region_model model1;
8452 test_region_model_context ctxt;
8453 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
8454 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
8455 ASSERT_TRUE (model0.can_merge_with_p (model1));
8456 }
8457
8458 {
8459 region_model model0;
8460 region_model model1;
8461 test_region_model_context ctxt;
8462 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
8463 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
8464 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
8465 ASSERT_TRUE (model0.can_merge_with_p (model1));
8466 }
8467 #endif
8468
8469 // TODO: what can't we merge? need at least one such test
8470
8471 /* TODO: various things
8472 - heap regions
8473 - value merging:
8474 - every combination, but in particular
8475 - pairs of regions
8476 */
8477
8478 /* Views. */
8479 {
8480 test_region_model_context ctxt;
8481 region_model model0;
8482
8483 region_id x_rid = model0.get_lvalue (x, &ctxt);
8484 region_id x_as_ptr = model0.get_or_create_view (x_rid, ptr_type_node,
8485 &ctxt);
8486 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
8487
8488 region_model model1 (model0);
8489 ASSERT_EQ (model1, model0);
8490
8491 /* They should be mergeable, and the result should be the same. */
8492 region_model merged;
8493 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8494 }
8495
8496 /* Verify that we can merge a model in which a local in an older stack
8497 frame points to a local in a more recent stack frame. */
8498 {
8499 region_model model0;
8500 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8501 region_id q_in_first_frame = model0.get_lvalue (q, NULL);
8502
8503 /* Push a second frame. */
8504 region_id rid_2nd_frame
8505 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8506
8507 /* Have a pointer in the older frame point to a local in the
8508 more recent frame. */
8509 svalue_id sid_ptr = model0.get_rvalue (addr_of_a, NULL);
8510 model0.set_value (q_in_first_frame, sid_ptr, NULL);
8511
8512 /* Verify that it's pointing at the newer frame. */
8513 region_id rid_pointee
8514 = model0.get_svalue (sid_ptr)->dyn_cast_region_svalue ()->get_pointee ();
8515 ASSERT_EQ (model0.get_region (rid_pointee)->get_parent (), rid_2nd_frame);
8516
8517 model0.canonicalize (NULL);
8518
8519 region_model model1 (model0);
8520 ASSERT_EQ (model0, model1);
8521
8522 /* They should be mergeable, and the result should be the same
8523 (after canonicalization, at least). */
8524 region_model merged;
8525 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8526 merged.canonicalize (NULL);
8527 ASSERT_EQ (model0, merged);
8528 }
8529
8530 /* Verify that we can merge a model in which a local points to a global. */
8531 {
8532 region_model model0;
8533 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8534 model0.set_value (model0.get_lvalue (q, NULL),
8535 model0.get_rvalue (addr_of_y, NULL), NULL);
8536
8537 model0.canonicalize (NULL);
8538
8539 region_model model1 (model0);
8540 ASSERT_EQ (model0, model1);
8541
8542 /* They should be mergeable, and the result should be the same
8543 (after canonicalization, at least). */
8544 region_model merged;
8545 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8546 merged.canonicalize (NULL);
8547 ASSERT_EQ (model0, merged);
8548 }
8549 }
8550
8551 /* Verify that constraints are correctly merged when merging region_model
8552 instances. */
8553
8554 static void
8555 test_constraint_merging ()
8556 {
8557 tree int_0 = build_int_cst (integer_type_node, 0);
8558 tree int_5 = build_int_cst (integer_type_node, 5);
8559 tree x = build_global_decl ("x", integer_type_node);
8560 tree y = build_global_decl ("y", integer_type_node);
8561 tree z = build_global_decl ("z", integer_type_node);
8562 tree n = build_global_decl ("n", integer_type_node);
8563
8564 test_region_model_context ctxt;
8565
8566 /* model0: 0 <= (x == y) < n. */
8567 region_model model0;
8568 model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
8569 integer_type_node, &ctxt);
8570 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
8571 model0.add_constraint (x, GE_EXPR, int_0, NULL);
8572 model0.add_constraint (x, LT_EXPR, n, NULL);
8573
8574 /* model1: z != 5 && (0 <= x < n). */
8575 region_model model1;
8576 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt),
8577 integer_type_node, &ctxt);
8578 model1.add_constraint (z, NE_EXPR, int_5, NULL);
8579 model1.add_constraint (x, GE_EXPR, int_0, NULL);
8580 model1.add_constraint (x, LT_EXPR, n, NULL);
8581
8582 /* They should be mergeable; the merged constraints should
8583 be: (0 <= x < n). */
8584 region_model merged;
8585 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8586
8587 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
8588 tristate (tristate::TS_TRUE));
8589 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
8590 tristate (tristate::TS_TRUE));
8591
8592 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
8593 tristate (tristate::TS_UNKNOWN));
8594 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
8595 tristate (tristate::TS_UNKNOWN));
8596 }
8597
/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  /* Comparison and dumping.  */
  test_tree_cmp_on_constants ();
  test_dump ();
  test_dump_2 ();
  test_dump_3 ();
  test_get_representative_tree ();
  /* Uniqueness/equality of svalues and regions.  */
  test_unique_constants ();
  test_svalue_equality ();
  test_region_equality ();
  /* Purging of state.  */
  test_purging_by_criteria ();
  test_purge_unused_svalues ();
  /* Model manipulation: assignment and stack frames.  */
  test_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  /* Canonicalization.  */
  test_canonicalization_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  /* Merging of models and of their constraints.  */
  test_state_merging ();
  test_constraint_merging ();
}
8623
8624 } // namespace selftest
8625
8626 #endif /* CHECKING_P */
8627
8628 } // namespace ana
8629
8630 #endif /* #if ENABLE_ANALYZER */