/* Consolidation of svalues and regions.
   Copyright (C) 2020 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "diagnostic-core.h"
#include "gimple-pretty-print.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "tristate.h"
#include "bitmap.h"
#include "selftest.h"
#include "json.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"

#if ENABLE_ANALYZER

namespace ana {

/* class region_model_manager.  */

/* region_model_manager's ctor.  */

region_model_manager::region_model_manager ()
: m_next_region_id (0),
  m_root_region (alloc_region_id ()),
  m_stack_region (alloc_region_id (), &m_root_region),
  m_heap_region (alloc_region_id (), &m_root_region),
  m_unknown_NULL (NULL),
  m_max_complexity (0, 0),
  m_code_region (alloc_region_id (), &m_root_region),
  m_fndecls_map (), m_labels_map (),
  m_globals_region (alloc_region_id (), &m_root_region),
  m_globals_map (),
  m_store_mgr (this)
{
}

/* region_model_manager's dtor.  Delete all of the managed svalues
   and regions.  */

region_model_manager::~region_model_manager ()
{
  /* Delete consolidated svalues.  */
  for (constants_map_t::iterator iter = m_constants_map.begin ();
       iter != m_constants_map.end (); ++iter)
    delete (*iter).second;
  for (unknowns_map_t::iterator iter = m_unknowns_map.begin ();
       iter != m_unknowns_map.end (); ++iter)
    delete (*iter).second;
  delete m_unknown_NULL;
  for (setjmp_values_map_t::iterator iter = m_setjmp_values_map.begin ();
       iter != m_setjmp_values_map.end (); ++iter)
    delete (*iter).second;
  for (poisoned_values_map_t::iterator iter = m_poisoned_values_map.begin ();
       iter != m_poisoned_values_map.end (); ++iter)
    delete (*iter).second;
  for (initial_values_map_t::iterator iter = m_initial_values_map.begin ();
       iter != m_initial_values_map.end (); ++iter)
    delete (*iter).second;
  for (pointer_values_map_t::iterator iter = m_pointer_values_map.begin ();
       iter != m_pointer_values_map.end (); ++iter)
    delete (*iter).second;
  for (unaryop_values_map_t::iterator iter = m_unaryop_values_map.begin ();
       iter != m_unaryop_values_map.end (); ++iter)
    delete (*iter).second;
  for (binop_values_map_t::iterator iter = m_binop_values_map.begin ();
       iter != m_binop_values_map.end (); ++iter)
    delete (*iter).second;
  for (sub_values_map_t::iterator iter = m_sub_values_map.begin ();
       iter != m_sub_values_map.end (); ++iter)
    delete (*iter).second;
  for (unmergeable_values_map_t::iterator iter
	 = m_unmergeable_values_map.begin ();
       iter != m_unmergeable_values_map.end (); ++iter)
    delete (*iter).second;
  for (widening_values_map_t::iterator iter = m_widening_values_map.begin ();
       iter != m_widening_values_map.end (); ++iter)
    delete (*iter).second;
  for (compound_values_map_t::iterator iter = m_compound_values_map.begin ();
       iter != m_compound_values_map.end (); ++iter)
    delete (*iter).second;
  for (conjured_values_map_t::iterator iter = m_conjured_values_map.begin ();
       iter != m_conjured_values_map.end (); ++iter)
    delete (*iter).second;

  /* Delete consolidated regions.  */
  for (fndecls_map_t::iterator iter = m_fndecls_map.begin ();
       iter != m_fndecls_map.end (); ++iter)
    delete (*iter).second;
  for (labels_map_t::iterator iter = m_labels_map.begin ();
       iter != m_labels_map.end (); ++iter)
    delete (*iter).second;
  for (globals_map_t::iterator iter = m_globals_map.begin ();
       iter != m_globals_map.end (); ++iter)
    delete (*iter).second;
  for (string_map_t::iterator iter = m_string_map.begin ();
       iter != m_string_map.end (); ++iter)
    delete (*iter).second;
}

/* Return true if C exceeds the complexity limit for svalues.  */

bool
region_model_manager::too_complex_p (const complexity &c) const
{
  if (c.m_max_depth > (unsigned)param_analyzer_max_svalue_depth)
    return true;
  return false;
}

/* If SVAL exceeds the complexity limit for svalues, delete it
   and return true.
   Otherwise update m_max_complexity and return false.  */

bool
region_model_manager::reject_if_too_complex (svalue *sval)
{
  const complexity &c = sval->get_complexity ();
  if (!too_complex_p (c))
    {
      if (m_max_complexity.m_num_nodes < c.m_num_nodes)
	m_max_complexity.m_num_nodes = c.m_num_nodes;
      if (m_max_complexity.m_max_depth < c.m_max_depth)
	m_max_complexity.m_max_depth = c.m_max_depth;
      return false;
    }

  delete sval;
  return true;
}

/* Macro for imposing a complexity limit on svalues, for use within
   region_model_manager member functions.

   If SVAL exceeds the complexity limit, delete it and return an UNKNOWN
   value of the same type.
   Otherwise update m_max_complexity and carry on.  */

#define RETURN_UNKNOWN_IF_TOO_COMPLEX(SVAL)		\
  do {							\
    svalue *sval_ = (SVAL);				\
    tree type_ = sval_->get_type ();			\
    if (reject_if_too_complex (sval_))			\
      return get_or_create_unknown_svalue (type_);	\
  } while (0)
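
/* For instance, at a use site such as the one in
   get_or_create_constant_svalue below, this expands to (a sketch):

     do {
       svalue *sval_ = (cst_sval);
       tree type_ = sval_->get_type ();
       if (reject_if_too_complex (sval_))
	 return get_or_create_unknown_svalue (type_);
     } while (0);

   so callers transparently degrade to an "unknown" svalue rather than
   building arbitrarily deep svalue trees.  */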

/* svalue consolidation.  */

/* Return the svalue * for a constant_svalue for CST_EXPR,
   creating it if necessary.
   The constant_svalue instances are reused, based on pointer equality
   of trees.  */

const svalue *
region_model_manager::get_or_create_constant_svalue (tree cst_expr)
{
  gcc_assert (cst_expr);

  constant_svalue **slot = m_constants_map.get (cst_expr);
  if (slot)
    return *slot;
  constant_svalue *cst_sval = new constant_svalue (cst_expr);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
  m_constants_map.put (cst_expr, cst_sval);
  return cst_sval;
}
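
/* A sketch of the consolidation guarantee (with MGR a
   region_model_manager; integer_zero_node is a real tree node, the
   rest is illustrative):

     const svalue *sval_a
       = mgr.get_or_create_constant_svalue (integer_zero_node);
     const svalue *sval_b
       = mgr.get_or_create_constant_svalue (integer_zero_node);
     gcc_assert (sval_a == sval_b);

   The same tree thus gives pointer-equal svalues, allowing cheap
   equality tests elsewhere in the analyzer.  */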

/* Return the svalue * for an unknown_svalue for TYPE (which can be NULL),
   creating it if necessary.
   The unknown_svalue instances are reused, based on pointer equality
   of the types.  */

const svalue *
region_model_manager::get_or_create_unknown_svalue (tree type)
{
  /* Special-case NULL, so that the hash_map can use NULL as the
     "empty" value.  */
  if (type == NULL_TREE)
    {
      if (!m_unknown_NULL)
	m_unknown_NULL = new unknown_svalue (type);
      return m_unknown_NULL;
    }

  unknown_svalue **slot = m_unknowns_map.get (type);
  if (slot)
    return *slot;
  unknown_svalue *sval = new unknown_svalue (type);
  m_unknowns_map.put (type, sval);
  return sval;
}

/* Return the svalue * for the initial value of REG, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_initial_value (const region *reg)
{
  /* The initial value of a cast is a cast of the initial value.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    {
      const region *original_reg = cast_reg->get_original_region ();
      return get_or_create_cast (cast_reg->get_type (),
				 get_or_create_initial_value (original_reg));
    }

  if (initial_svalue **slot = m_initial_values_map.get (reg))
    return *slot;
  initial_svalue *initial_sval = new initial_svalue (reg->get_type (), reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (initial_sval);
  m_initial_values_map.put (reg, initial_sval);
  return initial_sval;
}
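
/* For example, given a cast_region R_CAST viewing region R as TYPE,
   the recursion above builds the initial value of R_CAST as
   (a sketch):

     get_or_create_cast (TYPE, get_or_create_initial_value (R))

   rather than as a fresh initial_svalue, so the two views of the same
   storage share one underlying initial value.  */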

/* Return the svalue * for R using type TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_setjmp_svalue (const setjmp_record &r,
						   tree type)
{
  setjmp_svalue::key_t key (r, type);
  if (setjmp_svalue **slot = m_setjmp_values_map.get (key))
    return *slot;
  setjmp_svalue *setjmp_sval = new setjmp_svalue (r, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (setjmp_sval);
  m_setjmp_values_map.put (key, setjmp_sval);
  return setjmp_sval;
}

/* Return the svalue * for a poisoned value of KIND and TYPE, creating it if
   necessary.  */

const svalue *
region_model_manager::get_or_create_poisoned_svalue (enum poison_kind kind,
						     tree type)
{
  poisoned_svalue::key_t key (kind, type);
  if (poisoned_svalue **slot = m_poisoned_values_map.get (key))
    return *slot;
  poisoned_svalue *poisoned_sval = new poisoned_svalue (kind, type);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (poisoned_sval);
  m_poisoned_values_map.put (key, poisoned_sval);
  return poisoned_sval;
}

/* Return the svalue * for a pointer to POINTEE of type PTR_TYPE,
   creating it if necessary.  */

const svalue *
region_model_manager::get_ptr_svalue (tree ptr_type, const region *pointee)
{
  /* If this is a symbolic region from dereferencing a pointer, and the types
     match, then return the original pointer.  */
  if (const symbolic_region *sym_reg = pointee->dyn_cast_symbolic_region ())
    if (ptr_type == sym_reg->get_pointer ()->get_type ())
      return sym_reg->get_pointer ();

  region_svalue::key_t key (ptr_type, pointee);
  if (region_svalue **slot = m_pointer_values_map.get (key))
    return *slot;
  region_svalue *sval = new region_svalue (ptr_type, pointee);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sval);
  m_pointer_values_map.put (key, sval);
  return sval;
}

/* Subroutine of region_model_manager::get_or_create_unaryop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_unaryop (tree type, enum tree_code op,
					  const svalue *arg)
{
  /* Ops on "unknown" are also unknown.  */
  if (arg->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);

  switch (op)
    {
    default: break;
    case VIEW_CONVERT_EXPR:
    case NOP_EXPR:
      {
	/* Handle redundant casts.  */
	if (arg->get_type ()
	    && useless_type_conversion_p (arg->get_type (), type))
	  return arg;

	/* Fold "cast<TYPE> (cast <INNER_TYPE> (innermost_arg))"
	     => "cast<TYPE> (innermost_arg)",
	   unless INNER_TYPE is narrower than TYPE.  */
	if (const svalue *innermost_arg = arg->maybe_undo_cast ())
	  {
	    tree inner_type = arg->get_type ();
	    if (TYPE_SIZE (type)
		&& TYPE_SIZE (inner_type)
		&& (fold_binary (LE_EXPR, boolean_type_node,
				 TYPE_SIZE (type), TYPE_SIZE (inner_type))
		    == boolean_true_node))
	      return maybe_fold_unaryop (type, op, innermost_arg);
	  }
      }
      break;
    case TRUTH_NOT_EXPR:
      {
	/* Invert comparisons e.g. "!(x == y)" => "x != y".  */
	if (const binop_svalue *binop = arg->dyn_cast_binop_svalue ())
	  if (TREE_CODE_CLASS (binop->get_op ()) == tcc_comparison)
	    {
	      enum tree_code inv_op
		= invert_tree_comparison (binop->get_op (),
					  HONOR_NANS (binop->get_type ()));
	      if (inv_op != ERROR_MARK)
		return get_or_create_binop (binop->get_type (), inv_op,
					    binop->get_arg0 (),
					    binop->get_arg1 ());
	    }
      }
      break;
    }

  /* Constants.  */
  if (tree cst = arg->maybe_get_constant ())
    if (tree result = fold_unary (op, type, cst))
      return get_or_create_constant_svalue (result);

  return NULL;
}
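
/* For example, with C an svalue of char type, the nested cast
   "(short)(int)C" can be collapsed by the NOP_EXPR case above:
   TYPE_SIZE (short) <= TYPE_SIZE (int), so the inner cast is dropped,
   giving "(short)C".  (A sketch; the sizes involved are the usual
   ones but are target-dependent.)  */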

/* Return the svalue * for a unary operation OP on ARG with a result of
   type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unaryop (tree type, enum tree_code op,
					     const svalue *arg)
{
  if (const svalue *folded = maybe_fold_unaryop (type, op, arg))
    return folded;
  unaryop_svalue::key_t key (type, op, arg);
  if (unaryop_svalue **slot = m_unaryop_values_map.get (key))
    return *slot;
  unaryop_svalue *unaryop_sval = new unaryop_svalue (type, op, arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unaryop_sval);
  m_unaryop_values_map.put (key, unaryop_sval);
  return unaryop_sval;
}

/* Get a tree code for a cast to DST_TYPE from SRC_TYPE.
   Use NOP_EXPR if possible (e.g. to help fold_unary convert casts
   of 0 to (T*) to simple pointer constants), but use FIX_TRUNC_EXPR
   and VIEW_CONVERT_EXPR for cases that fold_unary would otherwise crash
   on.  */

static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
  gcc_assert (dst_type);
  if (!src_type)
    return NOP_EXPR;

  if (TREE_CODE (src_type) == REAL_TYPE)
    {
      if (TREE_CODE (dst_type) == INTEGER_TYPE)
	return FIX_TRUNC_EXPR;
      else
	return VIEW_CONVERT_EXPR;
    }

  return NOP_EXPR;
}
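
/* A sketch of the resulting mapping (integer_type_node and
   double_type_node are real GCC type nodes; RECORD_T stands for some
   RECORD_TYPE and is illustrative):

     get_code_for_cast (integer_type_node, double_type_node)
       => FIX_TRUNC_EXPR
     get_code_for_cast (double_type_node, integer_type_node)
       => NOP_EXPR
     get_code_for_cast (RECORD_T, double_type_node)
       => VIEW_CONVERT_EXPR  */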

/* Return the svalue * for a cast of ARG to type TYPE, creating it
   if necessary.  */

const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
  gcc_assert (type);
  enum tree_code op = get_code_for_cast (type, arg->get_type ());
  return get_or_create_unaryop (type, op, arg);
}

/* Subroutine of region_model_manager::get_or_create_binop.
   Attempt to fold the inputs and return a simpler svalue *.
   Otherwise, return NULL.  */

const svalue *
region_model_manager::maybe_fold_binop (tree type, enum tree_code op,
					const svalue *arg0,
					const svalue *arg1)
{
  tree cst0 = arg0->maybe_get_constant ();
  tree cst1 = arg1->maybe_get_constant ();
  /* (CST OP CST).  */
  if (cst0 && cst1)
    {
      if (tree result = fold_binary (op, type, cst0, cst1))
	if (CONSTANT_CLASS_P (result))
	  return get_or_create_constant_svalue (result);
    }

  if (FLOAT_TYPE_P (type)
      || (arg0->get_type () && FLOAT_TYPE_P (arg0->get_type ()))
      || (arg1->get_type () && FLOAT_TYPE_P (arg1->get_type ())))
    return NULL;

  switch (op)
    {
    default:
      break;
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* (VAL + 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MINUS_EXPR:
      /* (VAL - 0) -> VAL.  */
      if (cst1 && zerop (cst1) && type == arg0->get_type ())
	return arg0;
      break;
    case MULT_EXPR:
      /* (VAL * 0) -> 0.  */
      if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
	return get_or_create_constant_svalue (build_int_cst (type, 0));
      /* (VAL * 1) -> VAL.  */
      if (cst1 && integer_onep (cst1))
	return arg0;
      break;
    case BIT_AND_EXPR:
      if (cst1)
	if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	  /* "(ARG0 & 0)" -> "0".  */
	  return get_or_create_constant_svalue (build_int_cst (type, 0));
      break;
    case TRUTH_ANDIF_EXPR:
    case TRUTH_AND_EXPR:
      if (cst1)
	{
	  if (zerop (cst1) && INTEGRAL_TYPE_P (type))
	    /* "(ARG0 && 0)" -> "0".  */
	    return get_or_create_constant_svalue (build_int_cst (type, 0));
	  else
	    /* "(ARG0 && nonzero-cst)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	}
      break;
    case TRUTH_ORIF_EXPR:
    case TRUTH_OR_EXPR:
      if (cst1)
	{
	  if (zerop (cst1))
	    /* "(ARG0 || 0)" -> "ARG0".  */
	    return get_or_create_cast (type, arg0);
	  else
	    /* "(ARG0 || nonzero-cst)" -> "nonzero-cst".  */
	    return get_or_create_cast (type, arg1);
	}
      break;
    }

  /* For associative ops, fold "(X op CST_A) op CST_B" to
     "X op (CST_A op CST_B)".  */
  if (cst1 && associative_tree_code (op))
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == op
	  && binop->get_arg1 ()->maybe_get_constant ()
	  && type == binop->get_type ()
	  && type == binop->get_arg0 ()->get_type ()
	  && type == binop->get_arg1 ()->get_type ())
	return get_or_create_binop
	  (type, op, binop->get_arg0 (),
	   get_or_create_binop (type, op,
				binop->get_arg1 (), arg1));

  /* associative_tree_code is false for POINTER_PLUS_EXPR, but we
     can fold:
       "(PTR ptr+ CST_A) ptr+ CST_B" to "PTR ptr+ (CST_A ptr+ CST_B)"
     e.g. in data-model-1.c: test_4c.  */
  if (cst1 && op == POINTER_PLUS_EXPR)
    if (const binop_svalue *binop = arg0->dyn_cast_binop_svalue ())
      if (binop->get_op () == POINTER_PLUS_EXPR)
	if (binop->get_arg1 ()->maybe_get_constant ())
	  return get_or_create_binop
	    (type, op, binop->get_arg0 (),
	     get_or_create_binop (size_type_node, op,
				  binop->get_arg1 (), arg1));

  /* Ops on "unknown" are also unknown (unless we can use one of the
     identities above).  */
  if (arg0->get_kind () == SK_UNKNOWN
      || arg1->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);

  /* etc.  */

  return NULL;
}
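
/* For example, the associative-op rule above rewrites "(x + 1) + 2"
   (all operands of type int) as "x + (1 + 2)"; the recursive
   get_or_create_binop call then folds "1 + 2" to "3" via the
   (CST OP CST) case at the top of this function, so repeated
   increments don't accumulate nested binops.  */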

/* Return the svalue * for a binary operation OP on ARG0 and ARG1
   with a result of type TYPE, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_binop (tree type, enum tree_code op,
					   const svalue *arg0,
					   const svalue *arg1)
{
  /* For commutative ops, put any constant on the RHS.  */
  if (arg0->maybe_get_constant () && commutative_tree_code (op))
    std::swap (arg0, arg1);

  if (const svalue *folded = maybe_fold_binop (type, op, arg0, arg1))
    return folded;

  binop_svalue::key_t key (type, op, arg0, arg1);
  if (binop_svalue **slot = m_binop_values_map.get (key))
    return *slot;
  binop_svalue *binop_sval = new binop_svalue (type, op, arg0, arg1);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (binop_sval);
  m_binop_values_map.put (key, binop_sval);
  return binop_sval;
}
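
/* Because constants are canonicalized to the RHS for commutative ops,
   "1 + x" and "x + 1" produce the same key and hence consolidate to
   the same binop_svalue instance; the std::swap above achieves this
   without any extra map lookup.  */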

/* Subroutine of region_model_manager::get_or_create_sub_svalue.
   Return a folded svalue, or NULL.  */

const svalue *
region_model_manager::maybe_fold_sub_svalue (tree type,
					     const svalue *parent_svalue,
					     const region *subregion)
{
  /* Subvalues of "unknown" are unknown.  */
  if (parent_svalue->get_kind () == SK_UNKNOWN)
    return get_or_create_unknown_svalue (type);

  /* If we have a subregion of a zero-fill, it's zero.  */
  if (const unaryop_svalue *unary
	= parent_svalue->dyn_cast_unaryop_svalue ())
    {
      if (unary->get_op () == NOP_EXPR
	  || unary->get_op () == VIEW_CONVERT_EXPR)
	if (tree cst = unary->get_arg ()->maybe_get_constant ())
	  if (zerop (cst))
	    {
	      const svalue *cst_sval
		= get_or_create_constant_svalue (cst);
	      return get_or_create_cast (type, cst_sval);
	    }
    }

  /* Handle getting individual chars from a STRING_CST.  */
  if (tree cst = parent_svalue->maybe_get_constant ())
    if (TREE_CODE (cst) == STRING_CST)
      if (const element_region *element_reg
	    = subregion->dyn_cast_element_region ())
	{
	  const svalue *idx_sval = element_reg->get_index ();
	  if (tree cst_idx = idx_sval->maybe_get_constant ())
	    if (const svalue *char_sval
		  = maybe_get_char_from_string_cst (cst, cst_idx))
	      return get_or_create_cast (type, char_sval);
	}

  /* SUB(INIT(r)).FIELD -> INIT(r.FIELD)
     i.e.
     Subvalue(InitialValue(R1), FieldRegion(R2, F))
     -> InitialValue(FieldRegion(R1, F)).  */
  if (const initial_svalue *init_sval
	= parent_svalue->dyn_cast_initial_svalue ())
    {
      if (const field_region *field_reg = subregion->dyn_cast_field_region ())
	{
	  const region *field_reg_new
	    = get_field_region (init_sval->get_region (),
				field_reg->get_field ());
	  return get_or_create_initial_value (field_reg_new);
	}
    }

  return NULL;
}

/* Return the svalue * for extracting a subvalue of type TYPE from
   PARENT_SVALUE based on SUBREGION, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_sub_svalue (tree type,
						const svalue *parent_svalue,
						const region *subregion)
{
  if (const svalue *folded
	= maybe_fold_sub_svalue (type, parent_svalue, subregion))
    return folded;

  sub_svalue::key_t key (type, parent_svalue, subregion);
  if (sub_svalue **slot = m_sub_values_map.get (key))
    return *slot;
  sub_svalue *sub_sval
    = new sub_svalue (type, parent_svalue, subregion);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (sub_sval);
  m_sub_values_map.put (key, sub_sval);
  return sub_sval;
}

/* Return the svalue * that decorates ARG as being unmergeable,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_unmergeable (const svalue *arg)
{
  if (arg->get_kind () == SK_UNMERGEABLE)
    return arg;

  if (unmergeable_svalue **slot = m_unmergeable_values_map.get (arg))
    return *slot;
  unmergeable_svalue *unmergeable_sval = new unmergeable_svalue (arg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (unmergeable_sval);
  m_unmergeable_values_map.put (arg, unmergeable_sval);
  return unmergeable_sval;
}

/* Return the svalue * of type TYPE for the merger of value BASE_SVAL
   and ITER_SVAL at POINT, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_widening_svalue (tree type,
						     const program_point &point,
						     const svalue *base_sval,
						     const svalue *iter_sval)
{
  gcc_assert (base_sval->get_kind () != SK_WIDENING);
  gcc_assert (iter_sval->get_kind () != SK_WIDENING);
  widening_svalue::key_t key (type, point, base_sval, iter_sval);
  if (widening_svalue **slot = m_widening_values_map.get (key))
    return *slot;
  widening_svalue *widening_sval
    = new widening_svalue (type, point, base_sval, iter_sval);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (widening_sval);
  m_widening_values_map.put (key, widening_sval);
  return widening_sval;
}

/* Return the svalue * of type TYPE for the compound values in MAP,
   creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_compound_svalue (tree type,
						     const binding_map &map)
{
  compound_svalue::key_t tmp_key (type, &map);
  if (compound_svalue **slot = m_compound_values_map.get (tmp_key))
    return *slot;
  compound_svalue *compound_sval
    = new compound_svalue (type, map);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (compound_sval);
  /* Use make_key rather than reusing the key, so that we use a
     ptr to compound_sval's binding_map, rather than the MAP param.  */
  m_compound_values_map.put (compound_sval->make_key (), compound_sval);
  return compound_sval;
}

/* Return the svalue * of type TYPE for the value conjured for ID_REG
   at STMT, creating it if necessary.  */

const svalue *
region_model_manager::get_or_create_conjured_svalue (tree type,
						     const gimple *stmt,
						     const region *id_reg)
{
  conjured_svalue::key_t key (type, stmt, id_reg);
  if (conjured_svalue **slot = m_conjured_values_map.get (key))
    return *slot;
  conjured_svalue *conjured_sval
    = new conjured_svalue (type, stmt, id_reg);
  RETURN_UNKNOWN_IF_TOO_COMPLEX (conjured_sval);
  m_conjured_values_map.put (key, conjured_sval);
  return conjured_sval;
}

/* Given STRING_CST (a STRING_CST) and BYTE_OFFSET_CST (a constant),
   attempt to get the character at that offset, returning either
   the svalue for the character constant, or NULL if unsuccessful.  */

const svalue *
region_model_manager::maybe_get_char_from_string_cst (tree string_cst,
						      tree byte_offset_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  /* Adapted from fold_read_from_constant_string.  */
  scalar_int_mode char_mode;
  if (TREE_CODE (byte_offset_cst) == INTEGER_CST
      && compare_tree_int (byte_offset_cst,
			   TREE_STRING_LENGTH (string_cst)) < 0
      && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string_cst))),
		      &char_mode)
      && GET_MODE_SIZE (char_mode) == 1)
    {
      tree char_cst
	= build_int_cst_type (TREE_TYPE (TREE_TYPE (string_cst)),
			      (TREE_STRING_POINTER (string_cst)
			       [TREE_INT_CST_LOW (byte_offset_cst)]));
      return get_or_create_constant_svalue (char_cst);
    }
  return NULL;
}
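
/* For example, given the STRING_CST "foo" (whose TREE_STRING_LENGTH
   is 4, counting the trailing NUL) and byte offset 1, this returns
   the constant svalue for 'o'; a non-INTEGER_CST offset, an offset
   past the string, or a multi-byte element type all yield NULL.  */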

/* region consolidation.  */

/* Return the region for FNDECL, creating it if necessary.  */

const function_region *
region_model_manager::get_region_for_fndecl (tree fndecl)
{
  gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);

  function_region **slot = m_fndecls_map.get (fndecl);
  if (slot)
    return *slot;
  function_region *reg
    = new function_region (alloc_region_id (), &m_code_region, fndecl);
  m_fndecls_map.put (fndecl, reg);
  return reg;
}

/* Return the region for LABEL, creating it if necessary.  */

const label_region *
region_model_manager::get_region_for_label (tree label)
{
  gcc_assert (TREE_CODE (label) == LABEL_DECL);

  label_region **slot = m_labels_map.get (label);
  if (slot)
    return *slot;

  tree fndecl = DECL_CONTEXT (label);
  gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);

  const function_region *func_reg = get_region_for_fndecl (fndecl);
  label_region *reg
    = new label_region (alloc_region_id (), func_reg, label);
  m_labels_map.put (label, reg);
  return reg;
}

/* Return the region for EXPR, creating it if necessary.  */

const decl_region *
region_model_manager::get_region_for_global (tree expr)
{
  gcc_assert (TREE_CODE (expr) == VAR_DECL);

  decl_region **slot = m_globals_map.get (expr);
  if (slot)
    return *slot;
  decl_region *reg
    = new decl_region (alloc_region_id (), &m_globals_region, expr);
  m_globals_map.put (expr, reg);
  return reg;
}

/* Return the region that describes accessing field FIELD of PARENT,
   creating it if necessary.  */

const region *
region_model_manager::get_field_region (const region *parent, tree field)
{
  gcc_assert (TREE_CODE (field) == FIELD_DECL);

  field_region::key_t key (parent, field);
  if (field_region *reg = m_field_regions.get (key))
    return reg;

  field_region *field_reg
    = new field_region (alloc_region_id (), parent, field);
  m_field_regions.put (key, field_reg);
  return field_reg;
}

/* Return the region that describes accessing the element of type
   ELEMENT_TYPE at index INDEX of PARENT, creating it if necessary.  */

const region *
region_model_manager::get_element_region (const region *parent,
					  tree element_type,
					  const svalue *index)
{
  element_region::key_t key (parent, element_type, index);
  if (element_region *reg = m_element_regions.get (key))
    return reg;

  element_region *element_reg
    = new element_region (alloc_region_id (), parent, element_type, index);
  m_element_regions.put (key, element_reg);
  return element_reg;
}

/* Return the region that describes accessing the subregion of type
   TYPE at offset BYTE_OFFSET within PARENT, creating it if
   necessary.  */

const region *
region_model_manager::get_offset_region (const region *parent,
					 tree type,
					 const svalue *byte_offset)
{
  /* If BYTE_OFFSET is zero, return PARENT (viewed as TYPE).  */
  if (tree cst_offset = byte_offset->maybe_get_constant ())
    if (zerop (cst_offset))
      return get_cast_region (parent, type);

  /* Fold OFFSET_REGION(OFFSET_REGION(REG, X), Y)
     to OFFSET_REGION(REG, (X + Y)).  */
  if (const offset_region *parent_offset_reg
	= parent->dyn_cast_offset_region ())
    {
      const svalue *sval_x = parent_offset_reg->get_byte_offset ();
      const svalue *sval_sum
	= get_or_create_binop (byte_offset->get_type (),
			       PLUS_EXPR, sval_x, byte_offset);
      return get_offset_region (parent->get_parent_region (), type, sval_sum);
    }

  offset_region::key_t key (parent, type, byte_offset);
  if (offset_region *reg = m_offset_regions.get (key))
    return reg;

  offset_region *offset_reg
    = new offset_region (alloc_region_id (), parent, type, byte_offset);
  m_offset_regions.put (key, offset_reg);
  return offset_reg;
}
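
/* For example, OFFSET_REGION(OFFSET_REGION(REG, 4), 4) is
   consolidated as OFFSET_REGION(REG, 8): the two byte offsets are
   summed via get_or_create_binop before the map lookup, so chains of
   pointer arithmetic don't build nested offset regions.  */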

/* Return the region that describes accessing ORIGINAL_REGION as if
   it were of type TYPE, creating it if necessary.  */

const region *
region_model_manager::get_cast_region (const region *original_region,
				       tree type)
{
  /* If types match, return ORIGINAL_REGION.  */
  if (type == original_region->get_type ())
    return original_region;

  cast_region::key_t key (original_region, type);
  if (cast_region *reg = m_cast_regions.get (key))
    return reg;

  cast_region *cast_reg
    = new cast_region (alloc_region_id (), original_region, type);
  m_cast_regions.put (key, cast_reg);
  return cast_reg;
}

/* Return the frame_region for call to FUN from CALLING_FRAME, creating it
   if necessary.  CALLING_FRAME may be NULL.  */

const frame_region *
region_model_manager::get_frame_region (const frame_region *calling_frame,
					function *fun)
{
  int index = calling_frame ? calling_frame->get_index () + 1 : 0;

  frame_region::key_t key (calling_frame, fun);
  if (frame_region *reg = m_frame_regions.get (key))
    return reg;

  frame_region *frame_reg
    = new frame_region (alloc_region_id (), &m_stack_region, calling_frame,
			fun, index);
  m_frame_regions.put (key, frame_reg);
  return frame_reg;
}
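
/* Frame indices thus record stack depth: the outermost frame (with a
   NULL CALLING_FRAME) has index 0, a frame called from it has index
   1, and so on.  */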

/* Return the region that describes dereferencing SVAL, creating it
   if necessary.  */

const region *
region_model_manager::get_symbolic_region (const svalue *sval)
{
  symbolic_region::key_t key (&m_root_region, sval);
  if (symbolic_region *reg = m_symbolic_regions.get (key))
    return reg;

  symbolic_region *symbolic_reg
    = new symbolic_region (alloc_region_id (), &m_root_region, sval);
  m_symbolic_regions.put (key, symbolic_reg);
  return symbolic_reg;
}

/* Return the region that describes accessing STRING_CST, creating it
   if necessary.  */

const string_region *
region_model_manager::get_region_for_string (tree string_cst)
{
  gcc_assert (TREE_CODE (string_cst) == STRING_CST);

  string_region **slot = m_string_map.get (string_cst);
  if (slot)
    return *slot;
  string_region *reg
    = new string_region (alloc_region_id (), &m_root_region, string_cst);
  m_string_map.put (string_cst, reg);
  return reg;
}

/* If we see a tree code we don't know how to handle, rather than
   ICE or generate bogus results, create a dummy region, and notify
   CTXT so that it can mark the new state as being not properly
   modelled.  The exploded graph can then stop exploring that path,
   since any diagnostics we might issue will have questionable
   validity.  */

const region *
region_model_manager::
get_region_for_unexpected_tree_code (region_model_context *ctxt,
				     tree t,
				     const dump_location_t &loc)
{
  tree type = TYPE_P (t) ? t : TREE_TYPE (t);
  region *new_reg
    = new unknown_region (alloc_region_id (), &m_root_region, type);
  if (ctxt)
    ctxt->on_unexpected_tree_code (t, loc);
  return new_reg;
}

/* Return a new region describing a heap-allocated block of memory.  */

const region *
region_model_manager::create_region_for_heap_alloc ()
{
  region *reg
    = new heap_allocated_region (alloc_region_id (), &m_heap_region);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Return a new region describing a block of memory allocated within FRAME.  */

const region *
region_model_manager::create_region_for_alloca (const frame_region *frame)
{
  gcc_assert (frame);
  region *reg = new alloca_region (alloc_region_id (), frame);
  m_managed_dynamic_regions.safe_push (reg);
  return reg;
}

/* Log OBJ to LOGGER.  */

template <typename T>
static void
log_managed_object (logger *logger, const T *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  logger->end_log_line ();
}

/* Specialization for frame_region, which also logs the count of locals
   managed by the frame_region.  */

template <>
void
log_managed_object (logger *logger, const frame_region *obj)
{
  logger->start_log_line ();
  pretty_printer *pp = logger->get_printer ();
  pp_string (pp, " ");
  obj->dump_to_pp (pp, true);
  pp_printf (pp, " [with %i region(s) for locals]", obj->get_num_locals ());
  logger->end_log_line ();
}

/* Dump the number of objects that were managed by UNIQ_MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename K, typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const hash_map<K, T*> &uniq_map)
{
  logger->log (" # %s: %li", title, uniq_map.elements ());
  if (show_objs)
    for (typename hash_map<K, T*>::iterator iter = uniq_map.begin ();
	 iter != uniq_map.end (); ++iter)
      {
	T *managed_obj = (*iter).second;
	log_managed_object<T> (logger, managed_obj);
      }
}
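
/* With e.g. TITLE "constant_svalue", the call above logs a line of
   the form " # constant_svalue: 12" (the element count), followed,
   when SHOW_OBJS, by one line per managed object via
   log_managed_object.  */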

/* Dump the number of objects that were managed by MAP to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

template <typename T>
static void
log_uniq_map (logger *logger, bool show_objs, const char *title,
	      const consolidation_map<T> &map)
{
  logger->log (" # %s: %li", title, map.elements ());
  if (show_objs)
    for (typename consolidation_map<T>::iterator iter = map.begin ();
	 iter != map.end (); ++iter)
      {
	T *managed_obj = (*iter).second;
	log_managed_object<T> (logger, managed_obj);
      }
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.  */

void
region_model_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  logger->log ("svalue consolidation");
  log_uniq_map (logger, show_objs, "constant_svalue", m_constants_map);
  log_uniq_map (logger, show_objs, "unknown_svalue", m_unknowns_map);
  if (m_unknown_NULL)
    log_managed_object (logger, m_unknown_NULL);
  log_uniq_map (logger, show_objs, "poisoned_svalue", m_poisoned_values_map);
  log_uniq_map (logger, show_objs, "setjmp_svalue", m_setjmp_values_map);
  log_uniq_map (logger, show_objs, "initial_svalue", m_initial_values_map);
  log_uniq_map (logger, show_objs, "region_svalue", m_pointer_values_map);
  log_uniq_map (logger, show_objs, "unaryop_svalue", m_unaryop_values_map);
  log_uniq_map (logger, show_objs, "binop_svalue", m_binop_values_map);
  log_uniq_map (logger, show_objs, "sub_svalue", m_sub_values_map);
  log_uniq_map (logger, show_objs, "unmergeable_svalue",
		m_unmergeable_values_map);
  log_uniq_map (logger, show_objs, "widening_svalue", m_widening_values_map);
  log_uniq_map (logger, show_objs, "compound_svalue", m_compound_values_map);
  log_uniq_map (logger, show_objs, "conjured_svalue", m_conjured_values_map);
  logger->log ("max accepted svalue num_nodes: %i",
	       m_max_complexity.m_num_nodes);
  logger->log ("max accepted svalue max_depth: %i",
	       m_max_complexity.m_max_depth);

  logger->log ("region consolidation");
  logger->log (" next region id: %i", m_next_region_id);
  log_uniq_map (logger, show_objs, "function_region", m_fndecls_map);
  log_uniq_map (logger, show_objs, "label_region", m_labels_map);
  log_uniq_map (logger, show_objs, "decl_region for globals", m_globals_map);
  log_uniq_map (logger, show_objs, "field_region", m_field_regions);
  log_uniq_map (logger, show_objs, "element_region", m_element_regions);
  log_uniq_map (logger, show_objs, "offset_region", m_offset_regions);
  log_uniq_map (logger, show_objs, "cast_region", m_cast_regions);
  log_uniq_map (logger, show_objs, "frame_region", m_frame_regions);
  log_uniq_map (logger, show_objs, "symbolic_region", m_symbolic_regions);
  log_uniq_map (logger, show_objs, "string_region", m_string_map);
  logger->log (" # managed dynamic regions: %i",
	       m_managed_dynamic_regions.length ());
  m_store_mgr.log_stats (logger, show_objs);
}

/* Dump the number of objects of each class that were managed by this
   manager to LOGGER.
   If SHOW_OBJS is true, also dump the objects themselves.
   This is here so it can use log_uniq_map.  */

void
store_manager::log_stats (logger *logger, bool show_objs) const
{
  LOG_SCOPE (logger);
  log_uniq_map (logger, show_objs, "concrete_binding",
		m_concrete_binding_key_mgr);
  log_uniq_map (logger, show_objs, "symbolic_binding",
		m_symbolic_binding_key_mgr);
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */