analyzer: eliminate sm_context::warn_for_state in favor of a new 'warn' vfunc
gcc/analyzer/diagnostic-manager.cc
1 /* Classes for saving, deduplicating, and emitting analyzer diagnostics.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "pretty-print.h"
26 #include "gcc-rich-location.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "diagnostic-core.h"
30 #include "diagnostic-event-id.h"
31 #include "diagnostic-path.h"
32 #include "alloc-pool.h"
33 #include "fibonacci_heap.h"
34 #include "shortest-paths.h"
35 #include "sbitmap.h"
36 #include "bitmap.h"
37 #include "tristate.h"
38 #include "selftest.h"
39 #include "ordered-hash-map.h"
40 #include "analyzer/analyzer.h"
41 #include "analyzer/analyzer-logging.h"
42 #include "analyzer/sm.h"
43 #include "analyzer/pending-diagnostic.h"
44 #include "analyzer/diagnostic-manager.h"
45 #include "analyzer/call-string.h"
46 #include "analyzer/program-point.h"
47 #include "analyzer/store.h"
48 #include "analyzer/region-model.h"
49 #include "analyzer/constraint-manager.h"
50 #include "cfg.h"
51 #include "basic-block.h"
52 #include "gimple.h"
53 #include "gimple-iterator.h"
54 #include "cgraph.h"
55 #include "digraph.h"
56 #include "analyzer/supergraph.h"
57 #include "analyzer/program-state.h"
58 #include "analyzer/exploded-graph.h"
59 #include "analyzer/checker-path.h"
60 #include "analyzer/reachability.h"
61
62 #if ENABLE_ANALYZER
63
64 namespace ana {
65
66 /* class saved_diagnostic. */
67
68 /* saved_diagnostic's ctor.
69 Take ownership of D and STMT_FINDER. */
70
71 saved_diagnostic::saved_diagnostic (const state_machine *sm,
72 const exploded_node *enode,
73 const supernode *snode, const gimple *stmt,
74 stmt_finder *stmt_finder,
75 tree var,
76 const svalue *sval,
77 state_machine::state_t state,
78 pending_diagnostic *d)
79 : m_sm (sm), m_enode (enode), m_snode (snode), m_stmt (stmt),
80 /* stmt_finder could be on-stack; we want our own copy that can
81 outlive that. */
82 m_stmt_finder (stmt_finder ? stmt_finder->clone () : NULL),
83 m_var (var), m_sval (sval), m_state (state),
84 m_d (d), m_trailing_eedge (NULL),
85 m_status (STATUS_NEW), m_epath_length (0), m_problem (NULL)
86 {
87 gcc_assert (m_stmt || m_stmt_finder);
88
89 /* We must have an enode in order to be able to look for paths
90 through the exploded_graph to this diagnostic. */
91 gcc_assert (m_enode);
92 }
93
94 /* saved_diagnostic's dtor. */
95
96 saved_diagnostic::~saved_diagnostic ()
97 {
98 delete m_stmt_finder;
99 delete m_d;
100 delete m_problem;
101 }
102
103 bool
104 saved_diagnostic::operator== (const saved_diagnostic &other) const
105 {
106 return (m_sm == other.m_sm
107 /* We don't compare m_enode. */
108 && m_snode == other.m_snode
109 && m_stmt == other.m_stmt
110 /* We don't compare m_stmt_finder. */
111 && pending_diagnostic::same_tree_p (m_var, other.m_var)
112 && m_state == other.m_state
113 && m_d->equal_p (*other.m_d)
114 && m_trailing_eedge == other.m_trailing_eedge);
115 }
116
117 /* State for building a checker_path from a particular exploded_path.
118 In particular, this precomputes reachability information: the set of
119 source enodes for which a path can be found to the diagnostic enode. */
120
121 class path_builder
122 {
123 public:
124 path_builder (const exploded_graph &eg,
125 const exploded_path &epath)
126 : m_eg (eg),
127 m_diag_enode (epath.get_final_enode ()),
128 m_reachability (eg, m_diag_enode)
129 {}
130
131 const exploded_node *get_diag_node () const { return m_diag_enode; }
132
133 bool reachable_from_p (const exploded_node *src_enode) const
134 {
135 return m_reachability.reachable_from_p (src_enode);
136 }
137
138 const extrinsic_state &get_ext_state () const { return m_eg.get_ext_state (); }
139
140 private:
141 typedef reachability<eg_traits> enode_reachability;
142
143 const exploded_graph &m_eg;
144
145 /* The enode where the diagnostic occurs. */
146 const exploded_node *m_diag_enode;
147
148 /* Precompute all enodes from which the diagnostic is reachable. */
149 enode_reachability m_reachability;
150 };
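
/* Illustrative sketch (not part of the build): a path_builder is created
   once per diagnostic being emitted and queried when deciding whether a
   sibling out-edge matters, roughly:

     path_builder pb (eg, epath);
     ...
     if (pb.reachable_from_p (sibling->m_dest))
       ...then the edge under consideration is insignificant...

   This mirrors how emit_saved_diagnostic and significant_edge_p below
   use the class.  */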
151
152 /* class diagnostic_manager. */
153
154 /* diagnostic_manager's ctor. */
155
156 diagnostic_manager::diagnostic_manager (logger *logger, engine *eng,
157 int verbosity)
158 : log_user (logger), m_eng (eng), m_verbosity (verbosity)
159 {
160 }
161
162 /* Queue pending_diagnostic D at ENODE for later emission. */
163
164 void
165 diagnostic_manager::add_diagnostic (const state_machine *sm,
166 const exploded_node *enode,
167 const supernode *snode, const gimple *stmt,
168 stmt_finder *finder,
169 tree var,
170 const svalue *sval,
171 state_machine::state_t state,
172 pending_diagnostic *d)
173 {
174 LOG_FUNC (get_logger ());
175
176 /* We must have an enode in order to be able to look for paths
177 through the exploded_graph to the diagnostic. */
178 gcc_assert (enode);
179
180 saved_diagnostic *sd
181 = new saved_diagnostic (sm, enode, snode, stmt, finder, var, sval,
182 state, d);
183 m_saved_diagnostics.safe_push (sd);
184 if (get_logger ())
185 log ("adding saved diagnostic %i at SN %i: %qs",
186 m_saved_diagnostics.length () - 1,
187 snode->m_index, d->get_kind ());
188 }
189
190 /* Queue pending_diagnostic D at ENODE for later emission. */
191
192 void
193 diagnostic_manager::add_diagnostic (const exploded_node *enode,
194 const supernode *snode, const gimple *stmt,
195 stmt_finder *finder,
196 pending_diagnostic *d)
197 {
198 gcc_assert (enode);
199 add_diagnostic (NULL, enode, snode, stmt, finder, NULL_TREE, NULL, 0, d);
200 }
201
202 /* A class for identifying sets of duplicated pending_diagnostic.
203
204 We want to find the simplest dedupe_candidate amongst those that share a
205 dedupe_key. */
206
207 class dedupe_key
208 {
209 public:
210 dedupe_key (const saved_diagnostic &sd,
211 const exploded_path &epath)
212 : m_sd (sd), m_stmt (sd.m_stmt)
213 {
214 /* Support deferring the choice of stmt until after an emission path has
215 been built, using an optional stmt_finder. */
216 if (m_stmt == NULL)
217 {
218 gcc_assert (sd.m_stmt_finder);
219 m_stmt = sd.m_stmt_finder->find_stmt (epath);
220 }
221 gcc_assert (m_stmt);
222 }
223
224 hashval_t hash () const
225 {
226 inchash::hash hstate;
227 hstate.add_ptr (m_stmt);
228 // TODO: m_sd
229 return hstate.end ();
230 }
231 bool operator== (const dedupe_key &other) const
232 {
233 return (m_sd == other.m_sd
234 && m_stmt == other.m_stmt);
235 }
236
237 location_t get_location () const
238 {
239 return m_stmt->location;
240 }
241
242 /* A qsort comparator for use by dedupe_winners::emit_best
243 to sort them into location_t order. */
244
245 static int
246 comparator (const void *p1, const void *p2)
247 {
248 const dedupe_key *pk1 = *(const dedupe_key * const *)p1;
249 const dedupe_key *pk2 = *(const dedupe_key * const *)p2;
250
251 location_t loc1 = pk1->get_location ();
252 location_t loc2 = pk2->get_location ();
253
254 return linemap_compare_locations (line_table, loc2, loc1);
255 }
256
257 const saved_diagnostic &m_sd;
258 const gimple *m_stmt;
259 };
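
/* Illustrative sketch (not part of the build): two saved_diagnostics that
   compare equal (same sm, state, equivalent pending_diagnostic, etc.) and
   that resolve to the same gimple stmt yield equal dedupe_keys, since
   hash () only mixes in the stmt pointer and operator== compares m_sd and
   m_stmt:

     dedupe_key k1 (sd_a, epath_a);
     dedupe_key k2 (sd_b, epath_b);
     bool same_partition = (k1 == k2);

   (sd_a/sd_b and epath_a/epath_b are hypothetical placeholders.)
   dedupe_winners below keeps one best candidate per such key.  */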
260
261 /* The value of a slot for a dedupe_key within dedupe_winners:
262 the exploded_path for the best candidate for that key, and the
263 number of duplicates seen so far. */
264
265 class dedupe_candidate
266 {
267 public:
268 // has the exploded_path
269 dedupe_candidate (const shortest_exploded_paths &sp,
270 saved_diagnostic *sd)
271 : m_epath (sp.get_shortest_path (sd->m_enode)),
272 m_num_dupes (0)
273 {
274 }
275
276 unsigned length () const { return m_epath.length (); }
277 const exploded_path &get_path () const { return m_epath; }
278
279 void add_duplicate () { m_num_dupes++; }
280 int get_num_dupes () const { return m_num_dupes; }
281
282 private:
283 exploded_path m_epath;
284 public:
285 int m_num_dupes;
286 };
287
288 /* Traits for use by dedupe_winners. */
289
290 class dedupe_hash_map_traits
291 {
292 public:
293 typedef const dedupe_key *key_type;
294 typedef dedupe_candidate *value_type;
295 typedef dedupe_candidate *compare_type;
296
297 static inline hashval_t hash (const key_type &v)
298 {
299 return v->hash ();
300 }
301 static inline bool equal_keys (const key_type &k1, const key_type &k2)
302 {
303 return *k1 == *k2;
304 }
305 template <typename T>
306 static inline void remove (T &)
307 {
308 // TODO
309 }
310 template <typename T>
311 static inline void mark_deleted (T &entry)
312 {
313 entry.m_key = reinterpret_cast<key_type> (1);
314 }
315 template <typename T>
316 static inline void mark_empty (T &entry)
317 {
318 entry.m_key = NULL;
319 }
320 template <typename T>
321 static inline bool is_deleted (const T &entry)
322 {
323 return entry.m_key == reinterpret_cast<key_type> (1);
324 }
325 template <typename T>
326 static inline bool is_empty (const T &entry)
327 {
328 return entry.m_key == NULL;
329 }
330 static const bool empty_zero_p = true;
331 };
332
333 /* A class for deduplicating diagnostics and finding (and emitting) the
334 best diagnostic within each partition. */
335
336 class dedupe_winners
337 {
338 public:
339 dedupe_winners (engine *eng) : m_engine (eng) {}
340
341 ~dedupe_winners ()
342 {
343 /* Delete all keys and candidates. */
344 for (map_t::iterator iter = m_map.begin ();
345 iter != m_map.end ();
346 ++iter)
347 {
348 delete (*iter).first;
349 delete (*iter).second;
350 }
351 }
352
353 /* Determine an exploded_path for SD using SP and, if it's feasible,
354 determine if it's the best seen so far for its dedupe_key.
355 Retain the winner for each dedupe_key, and discard the rest. */
356
357 void add (logger *logger,
358 const shortest_exploded_paths &sp,
359 const exploded_graph *eg,
360 saved_diagnostic *sd)
361 {
362 /* Build a dedupe_candidate for SD.
363 This uses SP to build an exploded_path. */
364 dedupe_candidate *dc = new dedupe_candidate (sp, sd);
365
366 sd->set_epath_length (dc->length ());
367
368 /* Verify that the epath is feasible.
369 State-merging means that not every path in the epath corresponds
370 to a feasible one w.r.t. states.
371 Here we simply check each duplicate saved_diagnostic's
372 shortest_path, and reject any that aren't feasible.
373 This could introduce false negatives, as there could be longer
374 feasible paths within the egraph. */
375 if (logger)
376 logger->log ("considering %qs at EN: %i, SN: %i",
377 sd->m_d->get_kind (), sd->m_enode->m_index,
378 sd->m_snode->m_index);
379
380 feasibility_problem *p = NULL;
381 if (!dc->get_path ().feasible_p (logger, &p, m_engine, eg))
382 {
383 if (logger)
384 logger->log ("rejecting %qs at EN: %i, SN: %i"
385 " due to infeasible path",
386 sd->m_d->get_kind (), sd->m_enode->m_index,
387 sd->m_snode->m_index);
388 sd->set_infeasible (p);
389 delete dc;
390 return;
391 }
392 else
393 if (logger)
394 logger->log ("accepting %qs at EN: %i, SN: %i with feasible path",
395 sd->m_d->get_kind (), sd->m_enode->m_index,
396 sd->m_snode->m_index);
397
398 sd->set_feasible ();
399
400 dedupe_key *key = new dedupe_key (*sd, dc->get_path ());
401 if (dedupe_candidate **slot = m_map.get (key))
402 {
403 if (logger)
404 logger->log ("already have this dedupe_key");
405
406 (*slot)->add_duplicate ();
407
408 if (dc->length () < (*slot)->length ())
409 {
410 /* We've got a shorter path for the key; replace
411 the current candidate. */
412 if (logger)
413 logger->log ("length %i is better than existing length %i;"
414 " taking over this dedupe_key",
415 dc->length (), (*slot)->length ());
416 dc->m_num_dupes = (*slot)->get_num_dupes ();
417 delete *slot;
418 *slot = dc;
419 }
420 else
421 /* We haven't beaten the current best candidate;
422 drop the new candidate. */
423 {
424 if (logger)
425 logger->log ("length %i isn't better than existing length %i;"
426 " dropping this candidate",
427 dc->length (), (*slot)->length ());
428 delete dc;
429 }
430 delete key;
431 }
432 else
433 {
434 /* This is the first candidate for this key. */
435 m_map.put (key, dc);
436 if (logger)
437 logger->log ("first candidate for this dedupe_key");
438 }
439 }
440
441 /* Emit the simplest diagnostic within each set. */
442
443 void emit_best (diagnostic_manager *dm,
444 const exploded_graph &eg)
445 {
446 LOG_SCOPE (dm->get_logger ());
447
448 /* Get keys into a vec for sorting. */
449 auto_vec<const dedupe_key *> keys (m_map.elements ());
450 for (map_t::iterator iter = m_map.begin ();
451 iter != m_map.end ();
452 ++iter)
453 keys.quick_push ((*iter).first);
454
455 dm->log ("# keys after de-duplication: %i", keys.length ());
456
457 /* Sort into a good emission order. */
458 keys.qsort (dedupe_key::comparator);
459
460 /* Emit the best candidate for each key. */
461 int i;
462 const dedupe_key *key;
463 FOR_EACH_VEC_ELT (keys, i, key)
464 {
465 dedupe_candidate **slot = m_map.get (key);
466 gcc_assert (*slot);
467 const dedupe_candidate &dc = **slot;
468
469 dm->emit_saved_diagnostic (eg, key->m_sd,
470 dc.get_path (), key->m_stmt,
471 dc.get_num_dupes ());
472 }
473 }
474
475 private:
476 engine *m_engine;
477
478 /* This maps from each dedupe_key to a current best dedupe_candidate. */
479
480 typedef hash_map<const dedupe_key *, dedupe_candidate *,
481 dedupe_hash_map_traits> map_t;
482 map_t m_map;
483 };
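
/* Illustrative sketch (not part of the build) of the intended flow,
   mirroring emit_saved_diagnostics below:

     shortest_exploded_paths sp (eg, eg.get_origin ());
     dedupe_winners best_candidates (eg.get_engine ());
     FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
       best_candidates.add (get_logger (), sp, &eg, sd);
     best_candidates.emit_best (this, eg);

   add () drops candidates with infeasible epaths; emit_best () emits one
   diagnostic per surviving dedupe_key.  */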
484
485 /* Emit all saved diagnostics. */
486
487 void
488 diagnostic_manager::emit_saved_diagnostics (const exploded_graph &eg)
489 {
490 LOG_SCOPE (get_logger ());
491 auto_timevar tv (TV_ANALYZER_DIAGNOSTICS);
492 log ("# saved diagnostics: %i", m_saved_diagnostics.length ());
493 if (get_logger ())
494 {
495 unsigned i;
496 saved_diagnostic *sd;
497 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
498 log ("[%i] sd: %qs at EN: %i, SN: %i",
499 i, sd->m_d->get_kind (), sd->m_enode->m_index,
500 sd->m_snode->m_index);
501 }
502
503 if (m_saved_diagnostics.length () == 0)
504 return;
505
506 /* Compute the shortest_paths once, sharing it between all diagnostics. */
507 shortest_exploded_paths sp (eg, eg.get_origin ());
508
509 /* Iterate through all saved diagnostics, adding them to a dedupe_winners
510 instance. This partitions the saved diagnostics by dedupe_key,
511 generating exploded_paths for them, and retaining the best one in each
512 partition. */
513 dedupe_winners best_candidates (eg.get_engine ());
514
515 int i;
516 saved_diagnostic *sd;
517 FOR_EACH_VEC_ELT (m_saved_diagnostics, i, sd)
518 best_candidates.add (get_logger (), sp, &eg, sd);
519
520 /* For each dedupe-key, call emit_saved_diagnostic on the "best"
521 saved_diagnostic. */
522 best_candidates.emit_best (this, eg);
523 }
524
525 /* Given a saved_diagnostic SD at STMT with feasible path EPATH through EG,
526 create a checker_path of suitable events and use it to call
527 SD's underlying pending_diagnostic "emit" vfunc to emit a diagnostic. */
528
529 void
530 diagnostic_manager::emit_saved_diagnostic (const exploded_graph &eg,
531 const saved_diagnostic &sd,
532 const exploded_path &epath,
533 const gimple *stmt,
534 int num_dupes)
535 {
536 LOG_SCOPE (get_logger ());
537 log ("sd: %qs at SN: %i", sd.m_d->get_kind (), sd.m_snode->m_index);
538 log ("num dupes: %i", num_dupes);
539
540 pretty_printer *pp = global_dc->printer->clone ();
541
542 /* Precompute all enodes from which the diagnostic is reachable. */
543 path_builder pb (eg, epath);
544
545 /* This is the diagnostic_path subclass that will be built for
546 the diagnostic. */
547 checker_path emission_path;
548
549 /* Populate emission_path with a full description of EPATH. */
550 build_emission_path (pb, epath, &emission_path);
551
552 /* Now prune it to just cover the most pertinent events. */
553 prune_path (&emission_path, sd.m_sm, sd.m_sval, sd.m_state);
554
555 /* Add a final event to the path, covering the diagnostic itself.
556 We use the final enode from the epath, which might be different from
557 the sd.m_enode, as the dedupe code doesn't care about enodes, just
558 snodes. */
559 emission_path.add_final_event (sd.m_sm, epath.get_final_enode (), stmt,
560 sd.m_var, sd.m_state);
561
562 /* The "final" event might not be final; if the saved_diagnostic has a
563 trailing eedge stashed, add any events for it. This is for use
564 in handling longjmp, to show where a longjmp is rewinding to. */
565 if (sd.m_trailing_eedge)
566 add_events_for_eedge (pb, *sd.m_trailing_eedge, &emission_path);
567
568 emission_path.prepare_for_emission (sd.m_d);
569
570 gcc_rich_location rich_loc (get_stmt_location (stmt, sd.m_snode->m_fun));
571 rich_loc.set_path (&emission_path);
572
573 auto_diagnostic_group d;
574 auto_cfun sentinel (sd.m_snode->m_fun);
575 if (sd.m_d->emit (&rich_loc))
576 {
577 if (flag_analyzer_show_duplicate_count && num_dupes > 0)
578 inform_n (stmt->location, num_dupes,
579 "%i duplicate", "%i duplicates",
580 num_dupes);
581 }
582 delete pp;
583 }
584
585 /* Emit a "path" of events to EMISSION_PATH describing the exploded path
586 EPATH within EG. */
587
588 void
589 diagnostic_manager::build_emission_path (const path_builder &pb,
590 const exploded_path &epath,
591 checker_path *emission_path) const
592 {
593 LOG_SCOPE (get_logger ());
594 for (unsigned i = 0; i < epath.m_edges.length (); i++)
595 {
596 const exploded_edge *eedge = epath.m_edges[i];
597 add_events_for_eedge (pb, *eedge, emission_path);
598 }
599 }
600
601 /* Subclass of state_change_visitor that creates state_change_event
602 instances. */
603
604 class state_change_event_creator : public state_change_visitor
605 {
606 public:
607 state_change_event_creator (const exploded_edge &eedge,
608 checker_path *emission_path)
609 : m_eedge (eedge),
610 m_emission_path (emission_path)
611 {}
612
613 bool on_global_state_change (const state_machine &sm,
614 state_machine::state_t src_sm_val,
615 state_machine::state_t dst_sm_val)
616 FINAL OVERRIDE
617 {
618 const exploded_node *src_node = m_eedge.m_src;
619 const program_point &src_point = src_node->get_point ();
620 const int src_stack_depth = src_point.get_stack_depth ();
621 const exploded_node *dst_node = m_eedge.m_dest;
622 const gimple *stmt = src_point.get_stmt ();
623 const supernode *supernode = src_point.get_supernode ();
624 const program_state &dst_state = dst_node->get_state ();
625
626 int stack_depth = src_stack_depth;
627
628 m_emission_path->add_event (new state_change_event (supernode,
629 stmt,
630 stack_depth,
631 sm,
632 NULL,
633 src_sm_val,
634 dst_sm_val,
635 NULL,
636 dst_state));
637 return false;
638 }
639
640 bool on_state_change (const state_machine &sm,
641 state_machine::state_t src_sm_val,
642 state_machine::state_t dst_sm_val,
643 const svalue *sval,
644 const svalue *dst_origin_sval) FINAL OVERRIDE
645 {
646 const exploded_node *src_node = m_eedge.m_src;
647 const program_point &src_point = src_node->get_point ();
648 const int src_stack_depth = src_point.get_stack_depth ();
649 const exploded_node *dst_node = m_eedge.m_dest;
650 const gimple *stmt = src_point.get_stmt ();
651 const supernode *supernode = src_point.get_supernode ();
652 const program_state &dst_state = dst_node->get_state ();
653
654 int stack_depth = src_stack_depth;
655
656 if (m_eedge.m_sedge
657 && m_eedge.m_sedge->m_kind == SUPEREDGE_CFG_EDGE)
658 {
659 supernode = src_point.get_supernode ();
660 stmt = supernode->get_last_stmt ();
661 stack_depth = src_stack_depth;
662 }
663
664 /* Bulletproofing for state changes at calls/returns;
665 TODO: is there a better way? */
666 if (!stmt)
667 return false;
668
669 m_emission_path->add_event (new state_change_event (supernode,
670 stmt,
671 stack_depth,
672 sm,
673 sval,
674 src_sm_val,
675 dst_sm_val,
676 dst_origin_sval,
677 dst_state));
678 return false;
679 }
680
681 const exploded_edge &m_eedge;
682 checker_path *m_emission_path;
683 };
684
685 /* Compare SRC_STATE and DST_STATE (which use EXT_STATE), and call
686 VISITOR's on_state_change for every sm-state change that occurs
687 to a tree, and on_global_state_change for every global state change
688 that occurs.
689
690 This determines the state changes that ought to be reported to
691 the user: a combination of the effects of changes to sm_state_map
692 (which maps svalues to sm-states), and of region_model changes
693 (which map trees to svalues).
694
695 Bail out early and return true if any call to on_global_state_change
696 or on_state_change returns true, otherwise return false.
697
698 This is split out to make it easier to experiment with changes to
699 exploded_node granularity (so that we can observe what state changes
700 lead to state_change_events being emitted). */
701
702 bool
703 for_each_state_change (const program_state &src_state,
704 const program_state &dst_state,
705 const extrinsic_state &ext_state,
706 state_change_visitor *visitor)
707 {
708 gcc_assert (src_state.m_checker_states.length ()
709 == ext_state.get_num_checkers ());
710 gcc_assert (dst_state.m_checker_states.length ()
711 == ext_state.get_num_checkers ());
712 for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
713 {
714 const state_machine &sm = ext_state.get_sm (i);
715 const sm_state_map &src_smap = *src_state.m_checker_states[i];
716 const sm_state_map &dst_smap = *dst_state.m_checker_states[i];
717
718 /* Add events for any global state changes. */
719 if (src_smap.get_global_state () != dst_smap.get_global_state ())
720 if (visitor->on_global_state_change (sm,
721 src_smap.get_global_state (),
722 dst_smap.get_global_state ()))
723 return true;
724
725 /* Add events for per-svalue state changes. */
726 for (sm_state_map::iterator_t iter = dst_smap.begin ();
727 iter != dst_smap.end ();
728 ++iter)
729 {
730 const svalue *sval = (*iter).first;
731 state_machine::state_t dst_sm_val = (*iter).second.m_state;
732 state_machine::state_t src_sm_val
733 = src_smap.get_state (sval, ext_state);
734 if (dst_sm_val != src_sm_val)
735 {
736 const svalue *origin_sval = (*iter).second.m_origin;
737 if (visitor->on_state_change (sm, src_sm_val, dst_sm_val,
738 sval, origin_sval))
739 return true;
740 }
741 }
742 }
743 return false;
744 }
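
/* Illustrative sketch (not part of the build): callers pair this function
   with a state_change_visitor subclass; add_events_for_eedge below does:

     state_change_event_creator visitor (eedge, emission_path);
     for_each_state_change (src_state, dst_state, pb.get_ext_state (),
                            &visitor);

   so that each per-svalue or global sm-state delta along the edge becomes
   a state_change_event on the emission path.  */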
745
746 /* An sm_context for adding state_change_event on assignments to NULL,
747 where the default state isn't m_start. Storing such state in the
748 sm_state_map would lead to bloat of the exploded_graph, so we want
749 to leave it as a default state, and inject state change events here
750 when we have a diagnostic.
751 Find transitions of constants, for handling on_zero_assignment. */
752
753 struct null_assignment_sm_context : public sm_context
754 {
755 null_assignment_sm_context (int sm_idx,
756 const state_machine &sm,
757 const program_state *old_state,
758 const program_state *new_state,
759 const gimple *stmt,
760 const program_point *point,
761 checker_path *emission_path,
762 const extrinsic_state &ext_state)
763 : sm_context (sm_idx, sm), m_old_state (old_state), m_new_state (new_state),
764 m_stmt (stmt), m_point (point), m_emission_path (emission_path),
765 m_ext_state (ext_state)
766 {
767 }
768
769 tree get_fndecl_for_call (const gcall */*call*/) FINAL OVERRIDE
770 {
771 return NULL_TREE;
772 }
773
774 state_machine::state_t get_state (const gimple *stmt ATTRIBUTE_UNUSED,
775 tree var) FINAL OVERRIDE
776 {
777 const svalue *var_old_sval
778 = m_old_state->m_region_model->get_rvalue (var, NULL);
779 const sm_state_map *old_smap = m_old_state->m_checker_states[m_sm_idx];
780
781 state_machine::state_t current
782 = old_smap->get_state (var_old_sval, m_ext_state);
783
784 return current;
785 }
786
787 void set_next_state (const gimple *stmt,
788 tree var,
789 state_machine::state_t to,
790 tree origin ATTRIBUTE_UNUSED) FINAL OVERRIDE
791 {
792 state_machine::state_t from = get_state (stmt, var);
793 if (from != m_sm.get_start_state ())
794 return;
795
796 const svalue *var_new_sval
797 = m_new_state->m_region_model->get_rvalue (var, NULL);
798 const supernode *supernode = m_point->get_supernode ();
799 int stack_depth = m_point->get_stack_depth ();
800
801 m_emission_path->add_event (new state_change_event (supernode,
802 m_stmt,
803 stack_depth,
804 m_sm,
805 var_new_sval,
806 from, to,
807 NULL,
808 *m_new_state));
809 }
810
811 void warn (const supernode *, const gimple *,
812 tree, pending_diagnostic *d) FINAL OVERRIDE
813 {
814 delete d;
815 }
816
817 tree get_diagnostic_tree (tree expr) FINAL OVERRIDE
818 {
819 return expr;
820 }
821
822 state_machine::state_t get_global_state () const FINAL OVERRIDE
823 {
824 return 0;
825 }
826
827 void set_global_state (state_machine::state_t) FINAL OVERRIDE
828 {
829 /* No-op. */
830 }
831
832 void on_custom_transition (custom_transition *) FINAL OVERRIDE
833 {
834 }
835
836 tree is_zero_assignment (const gimple *stmt) FINAL OVERRIDE
837 {
838 const gassign *assign_stmt = dyn_cast <const gassign *> (stmt);
839 if (!assign_stmt)
840 return NULL_TREE;
841 if (const svalue *sval
842 = m_new_state->m_region_model->get_gassign_result (assign_stmt, NULL))
843 if (tree cst = sval->maybe_get_constant ())
844 if (::zerop (cst))
845 return gimple_assign_lhs (assign_stmt);
846 return NULL_TREE;
847 }
848
849 const program_state *m_old_state;
850 const program_state *m_new_state;
851 const gimple *m_stmt;
852 const program_point *m_point;
853 state_change_visitor *m_visitor;
854 checker_path *m_emission_path;
855 const extrinsic_state &m_ext_state;
856 };
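
/* Illustrative sketch (not part of the build): add_events_for_eedge below
   replays the stmts of the destination enode, creating one of these
   contexts per checker and handing it back to the state machine:

     null_assignment_sm_context sm_ctxt (i, sm, &old_state, &iter_state,
                                         stmt, &iter_point, emission_path,
                                         pb.get_ext_state ());
     sm.on_stmt (&sm_ctxt, dst_point.get_supernode (), stmt);

   Only transitions out of the start state become state_change_events;
   any pending_diagnostic passed to warn () is simply deleted.  */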
857
858 /* Subroutine of diagnostic_manager::build_emission_path.
859 Add any events for EEDGE to EMISSION_PATH. */
860
861 void
862 diagnostic_manager::add_events_for_eedge (const path_builder &pb,
863 const exploded_edge &eedge,
864 checker_path *emission_path) const
865 {
866 const exploded_node *src_node = eedge.m_src;
867 const program_point &src_point = src_node->get_point ();
868 const exploded_node *dst_node = eedge.m_dest;
869 const program_point &dst_point = dst_node->get_point ();
870 const int dst_stack_depth = dst_point.get_stack_depth ();
871 if (get_logger ())
872 {
873 get_logger ()->start_log_line ();
874 pretty_printer *pp = get_logger ()->get_printer ();
875 pp_printf (pp, "EN %i -> EN %i: ",
876 eedge.m_src->m_index,
877 eedge.m_dest->m_index);
878 src_point.print (pp, format (false));
879 pp_string (pp, "-> ");
880 dst_point.print (pp, format (false));
881 get_logger ()->end_log_line ();
882 }
883 const program_state &src_state = src_node->get_state ();
884 const program_state &dst_state = dst_node->get_state ();
885
886 /* Add state change events for the states that have changed.
887 We add these before events for superedges, so that if we have a
888 state_change_event due to following an edge, we'll get this sequence
889 of events:
890
891 | if (!ptr)
892 | ~
893 | |
894 | (1) assuming 'ptr' is non-NULL (state_change_event)
895 | (2) following 'false' branch... (start_cfg_edge_event)
896 ...
897 | do_something (ptr);
898 | ~~~~~~~~~~~~~^~~~~
899 | |
900 | (3) ...to here (end_cfg_edge_event). */
901 state_change_event_creator visitor (eedge, emission_path);
902 for_each_state_change (src_state, dst_state, pb.get_ext_state (),
903 &visitor);
904
905 /* Allow non-standard edges to add events, e.g. when rewinding from
906 longjmp to a setjmp. */
907 if (eedge.m_custom_info)
908 eedge.m_custom_info->add_events_to_path (emission_path, eedge);
909
910 /* Add events for superedges, function entries, and for statements. */
911 switch (dst_point.get_kind ())
912 {
913 default:
914 break;
915 case PK_BEFORE_SUPERNODE:
916 if (src_point.get_kind () == PK_AFTER_SUPERNODE)
917 {
918 if (eedge.m_sedge)
919 add_events_for_superedge (pb, eedge, emission_path);
920 }
921 /* Add function entry events. */
922 if (dst_point.get_supernode ()->entry_p ())
923 {
924 emission_path->add_event
925 (new function_entry_event
926 (dst_point.get_supernode ()->get_start_location (),
927 dst_point.get_fndecl (),
928 dst_stack_depth));
929 }
930 break;
931 case PK_BEFORE_STMT:
932 {
933 const gimple *stmt = dst_point.get_stmt ();
934 const gcall *call = dyn_cast <const gcall *> (stmt);
935 if (call && is_setjmp_call_p (call))
936 emission_path->add_event
937 (new setjmp_event (stmt->location,
938 dst_node,
939 dst_point.get_fndecl (),
940 dst_stack_depth,
941 call));
942 else
943 emission_path->add_event
944 (new statement_event (stmt,
945 dst_point.get_fndecl (),
946 dst_stack_depth, dst_state));
947
948 /* Create state change events for assignment to NULL.
949 Iterate through the stmts in dst_enode, adding state change
950 events for them. */
951 if (dst_state.m_region_model)
952 {
953 program_state iter_state (dst_state);
954 program_point iter_point (dst_point);
955 while (1)
956 {
957 const gimple *stmt = iter_point.get_stmt ();
958 if (const gassign *assign = dyn_cast<const gassign *> (stmt))
959 {
960 const extrinsic_state &ext_state = pb.get_ext_state ();
961 program_state old_state (iter_state);
962 iter_state.m_region_model->on_assignment (assign, NULL);
963 for (unsigned i = 0; i < ext_state.get_num_checkers (); i++)
964 {
965 const state_machine &sm = ext_state.get_sm (i);
966 null_assignment_sm_context sm_ctxt (i, sm,
967 &old_state,
968 &iter_state,
969 stmt,
970 &iter_point,
971 emission_path,
972 pb.get_ext_state ());
973 sm.on_stmt (&sm_ctxt, dst_point.get_supernode (), stmt);
974 // TODO: what about phi nodes?
975 }
976 }
977 iter_point.next_stmt ();
978 if (iter_point.get_kind () == PK_AFTER_SUPERNODE
979 || (dst_node->m_succs.length () > 1
980 && (iter_point
981 == dst_node->m_succs[0]->m_dest->get_point ())))
982 break;
983 }
984 }
985 }
986 break;
987 }
988 }
989
990 /* Return true if EEDGE is a significant edge in the path to the diagnostic
991 for PB.
992
993 Consider all of the sibling out-eedges from the same source enode
994 as EEDGE.
995 If there's no path from the destinations of those eedges to the
996 diagnostic enode, then we have to take this eedge and thus it's
997 significant.
998
999 Conversely if there is a path from the destination of any other sibling
1000 eedge to the diagnostic enode, then this edge is insignificant.
1001
1002 Example 1: redundant if-else:
1003
1004 (A) if (...) A
1005 (B) ... / \
1006 else B C
1007 (C) ... \ /
1008 (D) [DIAGNOSTIC] D
1009
1010 D is reachable by either B or C, so neither of these edges
1011 are significant.
1012
1013 Example 2: pertinent if-else:
1014
1015 (A) if (...) A
1016 (B) ... / \
1017 else B C
1018 (C) [NECESSARY CONDITION] | |
1019 (D) [POSSIBLE DIAGNOSTIC] D1 D2
1020
1021 D becomes D1 and D2 in the exploded graph, where the diagnostic occurs
1022 at D2. D2 is only reachable via C, so the A -> C edge is significant.
1023
1024 Example 3: redundant loop:
1025
1026 (A) while (...) +-->A
1027 (B) ... | / \
1028 (C) ... +-B C
1029 (D) [DIAGNOSTIC] |
1030 D
1031
1032 D is reachable from both B and C, so the A->C edge is not significant. */
1033
1034 bool
1035 diagnostic_manager::significant_edge_p (const path_builder &pb,
1036 const exploded_edge &eedge) const
1037 {
1038 int i;
1039 exploded_edge *sibling;
1040 FOR_EACH_VEC_ELT (eedge.m_src->m_succs, i, sibling)
1041 {
1042 if (sibling == &eedge)
1043 continue;
1044 if (pb.reachable_from_p (sibling->m_dest))
1045 {
1046 if (get_logger ())
1047 get_logger ()->log (" edge EN: %i -> EN: %i is insignificant as"
1048 " EN: %i is also reachable via"
1049 " EN: %i -> EN: %i",
1050 eedge.m_src->m_index, eedge.m_dest->m_index,
1051 pb.get_diag_node ()->m_index,
1052 sibling->m_src->m_index,
1053 sibling->m_dest->m_index);
1054 return false;
1055 }
1056 }
1057
1058 return true;
1059 }
1060
1061 /* Subroutine of diagnostic_manager::add_events_for_eedge
1062 where EEDGE has an underlying superedge i.e. a CFG edge,
1063 or an interprocedural call/return.
1064 Add any events for the superedge to EMISSION_PATH. */
1065
1066 void
1067 diagnostic_manager::add_events_for_superedge (const path_builder &pb,
1068 const exploded_edge &eedge,
1069 checker_path *emission_path)
1070 const
1071 {
1072 gcc_assert (eedge.m_sedge);
1073
1074 /* Don't add events for insignificant edges at verbosity levels below 3. */
1075 if (m_verbosity < 3)
1076 if (!significant_edge_p (pb, eedge))
1077 return;
1078
1079 const exploded_node *src_node = eedge.m_src;
1080 const program_point &src_point = src_node->get_point ();
1081 const exploded_node *dst_node = eedge.m_dest;
1082 const program_point &dst_point = dst_node->get_point ();
1083 const int src_stack_depth = src_point.get_stack_depth ();
1084 const int dst_stack_depth = dst_point.get_stack_depth ();
1085 const gimple *last_stmt = src_point.get_supernode ()->get_last_stmt ();
1086
1087 switch (eedge.m_sedge->m_kind)
1088 {
1089 case SUPEREDGE_CFG_EDGE:
1090 {
1091 emission_path->add_event
1092 (new start_cfg_edge_event (eedge,
1093 (last_stmt
1094 ? last_stmt->location
1095 : UNKNOWN_LOCATION),
1096 src_point.get_fndecl (),
1097 src_stack_depth));
1098 emission_path->add_event
1099 (new end_cfg_edge_event (eedge,
1100 dst_point.get_supernode ()->get_start_location (),
1101 dst_point.get_fndecl (),
1102 dst_stack_depth));
1103 }
1104 break;
1105
1106 case SUPEREDGE_CALL:
1107 {
1108 emission_path->add_event
1109 (new call_event (eedge,
1110 (last_stmt
1111 ? last_stmt->location
1112 : UNKNOWN_LOCATION),
1113 src_point.get_fndecl (),
1114 src_stack_depth));
1115 }
1116 break;
1117
1118 case SUPEREDGE_INTRAPROCEDURAL_CALL:
1119 {
1120 /* TODO: add a subclass for this, or generate events for the
1121 summary. */
1122 emission_path->add_event
1123 (new debug_event ((last_stmt
1124 ? last_stmt->location
1125 : UNKNOWN_LOCATION),
1126 src_point.get_fndecl (),
1127 src_stack_depth,
1128 "call summary"));
1129 }
1130 break;
1131
1132 case SUPEREDGE_RETURN:
1133 {
1134 const return_superedge *return_edge
1135 = as_a <const return_superedge *> (eedge.m_sedge);
1136
1137 const gcall *call_stmt = return_edge->get_call_stmt ();
1138 emission_path->add_event
1139 (new return_event (eedge,
1140 (call_stmt
1141 ? call_stmt->location
1142 : UNKNOWN_LOCATION),
1143 dst_point.get_fndecl (),
1144 dst_stack_depth));
1145 }
1146 break;
1147 }
1148 }
1149
1150 /* Prune PATH, based on the verbosity level, to the most pertinent
1151 events for a diagnostic that involves VAR ending in state STATE
1152 (for state machine SM).
1153
1154 PATH is updated in place, and the redundant checker_events are deleted.
1155
1156 As well as deleting events, call record_critical_state on events in
1157 which state critical to the pending_diagnostic is being handled; see
1158 the comment for diagnostic_manager::prune_for_sm_diagnostic. */
1159
1160 void
1161 diagnostic_manager::prune_path (checker_path *path,
1162 const state_machine *sm,
1163 const svalue *sval,
1164 state_machine::state_t state) const
1165 {
1166 LOG_FUNC (get_logger ());
1167 path->maybe_log (get_logger (), "path");
1168 prune_for_sm_diagnostic (path, sm, sval, state);
1169 prune_interproc_events (path);
1170 finish_pruning (path);
1171 path->maybe_log (get_logger (), "pruned");
1172 }
1173
1174 /* A cheap test to determine if EXPR can be the expression of interest in
1175 an sm-diagnostic, so that we can reject cases where we have a non-lvalue.
1176 We don't always have a model when calling this, so we can't use
1177 tentative_region_model_context, and hence there can be false positives. */
1178
1179 static bool
1180 can_be_expr_of_interest_p (tree expr)
1181 {
1182 if (!expr)
1183 return false;
1184
1185 /* Reject constants. */
1186 if (CONSTANT_CLASS_P (expr))
1187 return false;
1188
1189 /* Otherwise assume that it can be an lvalue. */
1190 return true;
1191 }
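
/* Illustrative sketch (not part of the build): the test above only rejects
   NULL and constants, e.g.:

     can_be_expr_of_interest_p (integer_zero_node);  -> false: a constant
     can_be_expr_of_interest_p (parm);               -> true: assumed lvalue

   where "parm" stands for a hypothetical PARM_DECL tree; anything
   non-constant is optimistically treated as a possible lvalue.  */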
1192
1193 /* First pass of diagnostic_manager::prune_path: apply verbosity level,
1194 pruning unrelated state change events.
1195
1196 Iterate backwards through PATH, skipping state change events that aren't
1197 relevant to VAR, but updating the pertinent VAR when state-copying occurs.
1198
1199 As well as deleting events, call record_critical_state on events in
1200 which state critical to the pending_diagnostic is being handled, so
1201 that the event's get_desc vfunc can potentially supply a more precise
1202 description of the event to the user.
1203 e.g. improving
1204 "calling 'foo' from 'bar'"
1205 to
1206 "passing possibly-NULL pointer 'ptr' to 'foo' from 'bar' as param 1"
1207 when the diagnostic relates to later dereferencing 'ptr'. */
1208
1209 void
1210 diagnostic_manager::prune_for_sm_diagnostic (checker_path *path,
1211 const state_machine *sm,
1212 const svalue *sval,
1213 state_machine::state_t state) const
1214 {
1215 int idx = path->num_events () - 1;
1216 while (idx >= 0 && idx < (signed)path->num_events ())
1217 {
1218 checker_event *base_event = path->get_checker_event (idx);
1219 if (get_logger ())
1220 {
1221 if (sm)
1222 {
1223 if (sval)
1224 {
1225 label_text sval_desc = sval->get_desc ();
1226 log ("considering event %i (%s), with sval: %qs, state: %qs",
1227 idx, event_kind_to_string (base_event->m_kind),
1228 sval_desc.m_buffer, state->get_name ());
1229 }
1230 else
1231 log ("considering event %i (%s), with global state: %qs",
1232 idx, event_kind_to_string (base_event->m_kind),
1233 state->get_name ());
1234 }
1235 else
1236 log ("considering event %i", idx);
1237 }
1238
1239 switch (base_event->m_kind)
1240 {
1241 default:
1242 gcc_unreachable ();
1243
1244 case EK_DEBUG:
1245 if (m_verbosity < 4)
1246 {
1247 log ("filtering event %i: debug event", idx);
1248 path->delete_event (idx);
1249 }
1250 break;
1251
1252 case EK_CUSTOM:
1253 /* Don't filter custom events. */
1254 break;
1255
1256 case EK_STMT:
1257 {
1258 if (m_verbosity < 4)
1259 {
1260 log ("filtering event %i: statement event", idx);
1261 path->delete_event (idx);
1262 }
1263 }
1264 break;
1265
1266 case EK_FUNCTION_ENTRY:
1267 if (m_verbosity < 1)
1268 {
1269 log ("filtering event %i: function entry", idx);
1270 path->delete_event (idx);
1271 }
1272 break;
1273
1274 case EK_STATE_CHANGE:
1275 {
1276 state_change_event *state_change = (state_change_event *)base_event;
1277 gcc_assert (state_change->m_dst_state.m_region_model);
1278
1279 if (state_change->m_sval == sval)
1280 {
1281 if (state_change->m_origin)
1282 {
1283 if (get_logger ())
1284 {
1285 label_text sval_desc = sval->get_desc ();
1286 label_text origin_sval_desc
1287 = state_change->m_origin->get_desc ();
1288 log ("event %i:"
1289 " switching var of interest from %qs to %qs",
1290 idx, sval_desc.m_buffer,
1291 origin_sval_desc.m_buffer);
1292 }
1293 sval = state_change->m_origin;
1294 }
1295 log ("event %i: switching state of interest from %qs to %qs",
1296 idx, state_change->m_to->get_name (),
1297 state_change->m_from->get_name ());
1298 state = state_change->m_from;
1299 }
1300 else if (m_verbosity < 4)
1301 {
1302 if (get_logger ())
1303 {
1304 if (state_change->m_sval)
1305 {
1306 label_text change_sval_desc
1307 = state_change->m_sval->get_desc ();
1308 if (sval)
1309 {
1310 label_text sval_desc = sval->get_desc ();
1311 log ("filtering event %i:"
1312 " state change to %qs unrelated to %qs",
1313 idx, change_sval_desc.m_buffer,
1314 sval_desc.m_buffer);
1315 }
1316 else
1317 log ("filtering event %i: state change to %qs",
1318 idx, change_sval_desc.m_buffer);
1319 }
1320 else
1321 log ("filtering event %i: global state change", idx);
1322 }
1323 path->delete_event (idx);
1324 }
1325 }
1326 break;
1327
1328 case EK_START_CFG_EDGE:
1329 {
1330 cfg_edge_event *event = (cfg_edge_event *)base_event;
1331
1332 /* TODO: is this edge significant to var?
1333 See if var can be in other states in the dest, but not
1334 in other states in the src?
1335 Must have multiple sibling edges. */
1336
1337 if (event->should_filter_p (m_verbosity))
1338 {
1339 log ("filtering events %i and %i: CFG edge", idx, idx + 1);
1340 path->delete_event (idx);
1341 /* Also delete the corresponding EK_END_CFG_EDGE. */
1342 gcc_assert (path->get_checker_event (idx)->m_kind
1343 == EK_END_CFG_EDGE);
1344 path->delete_event (idx);
1345 }
1346 }
1347 break;
1348
1349 case EK_END_CFG_EDGE:
1350 /* These come in pairs with EK_START_CFG_EDGE events and are
1351 filtered when their start event is filtered. */
1352 break;
1353
1354 case EK_CALL_EDGE:
1355 {
1356 call_event *event = (call_event *)base_event;
1357 const callgraph_superedge& cg_superedge
1358 = event->get_callgraph_superedge ();
1359 const region_model *callee_model
1360 = event->m_eedge.m_dest->get_state ().m_region_model;
1361 tree callee_var = callee_model->get_representative_tree (sval);
1362 /* We could just use caller_model->get_representative_tree (sval);
1363 to get the caller_var, but for now use
1364 map_expr_from_callee_to_caller so as to only record critical
1365 state for parms and the like. */
1366 callsite_expr expr;
1367 tree caller_var
1368 = cg_superedge.map_expr_from_callee_to_caller (callee_var, &expr);
1369 if (caller_var)
1370 {
1371 if (get_logger ())
1372 {
1373 label_text sval_desc = sval->get_desc ();
1374 log ("event %i:"
1375 " recording critical state for %qs at call"
1376 " from %qE in callee to %qE in caller",
1377 idx, sval_desc.m_buffer, callee_var, caller_var);
1378 }
1379 if (expr.param_p ())
1380 event->record_critical_state (caller_var, state);
1381 }
1382 }
1383 break;
1384
1385 case EK_RETURN_EDGE:
1386 {
1387 if (sval)
1388 {
1389 return_event *event = (return_event *)base_event;
1390 const callgraph_superedge& cg_superedge
1391 = event->get_callgraph_superedge ();
1392 const region_model *caller_model
1393 = event->m_eedge.m_dest->get_state ().m_region_model;
1394 tree caller_var = caller_model->get_representative_tree (sval);
1395 callsite_expr expr;
1396 tree callee_var
1397 = cg_superedge.map_expr_from_caller_to_callee (caller_var,
1398 &expr);
1399 if (callee_var)
1400 {
1401 if (get_logger ())
1402 {
1403 label_text sval_desc = sval->get_desc ();
1404 log ("event %i:"
1405 " recording critical state for %qs at return"
1406 " from %qE in caller to %qE in callee",
1407 idx, sval_desc.m_buffer, caller_var, callee_var);
1408 }
1409 if (expr.return_value_p ())
1410 event->record_critical_state (callee_var, state);
1411 }
1412 }
1413 }
1414 break;
1415
1416 case EK_SETJMP:
1417 /* TODO: only show setjmp_events that matter i.e. those for which
1418 there is a later rewind event using them. */
1419 case EK_REWIND_FROM_LONGJMP:
1420 case EK_REWIND_TO_SETJMP:
1421 break;
1422
1423 case EK_WARNING:
1424 /* Always show the final "warning" event in the path. */
1425 break;
1426 }
1427 idx--;
1428 }
1429 }
1430
1431 /* Subroutine of diagnostic_manager::prune_for_sm_diagnostic.
1432 If *EXPR is not suitable to be the expression of interest in
1433 an sm-diagnostic, set *EXPR to NULL and log. */
1434
1435 void
1436 diagnostic_manager::update_for_unsuitable_sm_exprs (tree *expr) const
1437 {
1438 gcc_assert (expr);
1439 if (*expr && !can_be_expr_of_interest_p (*expr))
1440 {
1441 log ("new var %qE is unsuitable; setting var to NULL", *expr);
1442 *expr = NULL_TREE;
1443 }
1444 }
1445
1446 /* Second pass of diagnostic_manager::prune_path: remove redundant
1447 interprocedural information.
1448
1449 For example, given:
1450 (1)- calling "f2" from "f1"
1451 (2)--- entry to "f2"
1452 (3)--- calling "f3" from "f2"
1453 (4)----- entry to "f3"
1454 (5)--- returning to "f2" from "f3"
1455 (6)- returning to "f1" from "f2"
1456 with no other intervening events, then none of these events are
1457 likely to be interesting to the user.
1458
1459 Prune [..., call, function-entry, return, ...] triples repeatedly
1460 until nothing has changed. For the example above, this would
1461 remove events (3, 4, 5), and then remove events (1, 2, 6). */
1462
1463 void
1464 diagnostic_manager::prune_interproc_events (checker_path *path) const
1465 {
1466 bool changed = false;
1467 do
1468 {
1469 changed = false;
1470 int idx = path->num_events () - 1;
1471 while (idx >= 0)
1472 {
1473 /* Prune [..., call, function-entry, return, ...] triples. */
1474 if (idx + 2 < (signed)path->num_events ()
1475 && path->get_checker_event (idx)->is_call_p ()
1476 && path->get_checker_event (idx + 1)->is_function_entry_p ()
1477 && path->get_checker_event (idx + 2)->is_return_p ())
1478 {
1479 if (get_logger ())
1480 {
1481 label_text desc
1482 (path->get_checker_event (idx)->get_desc (false));
1483 log ("filtering events %i-%i:"
1484 " irrelevant call/entry/return: %s",
1485 idx, idx + 2, desc.m_buffer);
1486 desc.maybe_free ();
1487 }
1488 path->delete_event (idx + 2);
1489 path->delete_event (idx + 1);
1490 path->delete_event (idx);
1491 changed = true;
1492 idx--;
1493 continue;
1494 }
1495
1496 /* Prune [..., call, return, ...] pairs
1497 (for -fanalyzer-verbosity=0). */
1498 if (idx + 1 < (signed)path->num_events ()
1499 && path->get_checker_event (idx)->is_call_p ()
1500 && path->get_checker_event (idx + 1)->is_return_p ())
1501 {
1502 if (get_logger ())
1503 {
1504 label_text desc
1505 (path->get_checker_event (idx)->get_desc (false));
1506 log ("filtering events %i-%i:"
1507 " irrelevant call/return: %s",
1508 idx, idx + 1, desc.m_buffer);
1509 desc.maybe_free ();
1510 }
1511 path->delete_event (idx + 1);
1512 path->delete_event (idx);
1513 changed = true;
1514 idx--;
1515 continue;
1516 }
1517
1518 idx--;
1519 }
1520
1521 }
1522 while (changed);
1523 }
1524
1525 /* Final pass of diagnostic_manager::prune_path.
1526
1527 If all we're left with is in one function, then filter function entry
1528 events. */
1529
1530 void
1531 diagnostic_manager::finish_pruning (checker_path *path) const
1532 {
1533 if (!path->interprocedural_p ())
1534 {
1535 int idx = path->num_events () - 1;
1536 while (idx >= 0 && idx < (signed)path->num_events ())
1537 {
1538 checker_event *base_event = path->get_checker_event (idx);
1539 if (base_event->m_kind == EK_FUNCTION_ENTRY)
1540 {
1541 log ("filtering event %i:"
1542 " function entry for purely intraprocedural path", idx);
1543 path->delete_event (idx);
1544 }
1545 idx--;
1546 }
1547 }
1548 }
1549
1550 } // namespace ana
1551
1552 #endif /* #if ENABLE_ANALYZER */