/* Variable tracking routines for the GNU compiler.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
/* This file contains the variable tracking pass.  It computes where
   variables are located (which registers or where in memory) at each position
   in the instruction stream and emits notes describing the locations.
   Debug information (DWARF2 location lists) is finally generated from
   these notes.
   With this debug information, it is possible to show variables
   even when debugging optimized code.

   How does the variable tracking pass work?

   First, it scans RTL code for uses, stores and clobbers (register/memory
   references in instructions), for call insns and for stack adjustments
   separately for each basic block and saves them to an array of micro
   operations.
   The micro operations of one instruction are ordered so that
   pre-modifying stack adjustment < use < use with no var < call insn <
   clobber < set < post-modifying stack adjustment

   Then, a forward dataflow analysis is performed to find out how locations
   of variables change through code and to propagate the variable locations
   along the control flow graph.
   The IN set for basic block BB is computed as a union of OUT sets of BB's
   predecessors, the OUT set for BB is copied from the IN set for BB and
   is changed according to micro operations in BB.

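   As an illustrative sketch (not code from this pass), the fixed
   point computed by vt_find_locations is conceptually

     IN[BB]  = union of OUT[P] for each predecessor P of BB
     OUT[BB] = transfer (IN[BB], micro operations of BB)

   iterated until no OUT set changes.
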
   The IN and OUT sets for basic blocks consist of a current stack adjustment
   (used for adjusting offset of variables addressed using stack pointer),
   the table of structures describing the locations of parts of a variable
   and a linked list for each physical register.
   The linked list is a list of variable parts stored in the register,
   i.e. it is a list of triplets (reg, decl, offset) where decl is
   REG_EXPR (reg) and offset is REG_OFFSET (reg).  The linked list is used
   for efficiently deleting the appropriate variable parts when we set or
   clobber the register.

   There may be more than one variable part in a register.  The linked lists
   should be pretty short so a list is a good data structure here.
   For example in the following code, the register allocator may assign the
   same register to variables A and B, and both of them are stored in the
   same register in CODE:

     if (cond)
       set A;
     else
       set B;
     CODE;
     if (cond)
       use A;
     else
       use B;

   Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations
   are emitted to appropriate positions in RTL code.  Each such note describes
   the location of one variable at the point in the instruction stream where
   the note is.  There is no need to emit a note for each variable before each
   instruction; we only emit these notes where the location of a variable
   changes (this means that we also emit notes for changes between the OUT
   set of the previous block and the IN set of the current block).

   The notes consist of two parts:
   1. the declaration (from REG_EXPR or MEM_EXPR)
   2. the location of a variable - it is either a simple register/memory
      reference (for simple variables, for example int),
      or a parallel of register/memory references (for a large variable
      which consists of several parts, for example long long).

*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "alias.h"
#include "tree.h"
#include "varasm.h"
#include "stor-layout.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "tm_p.h"
#include "flags.h"
#include "insn-config.h"
#include "reload.h"
#include "alloc-pool.h"
#include "regs.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pass.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "cselib.h"
#include "target.h"
#include "params.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "recog.h"
#include "rtl-iter.h"
#include "fibonacci_heap.h"

typedef fibonacci_heap <long, basic_block_def> bb_heap_t;
typedef fibonacci_node <long, basic_block_def> bb_heap_node_t;

/* var-tracking.c assumes that tree code with the same value as VALUE rtx code
   has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl.
   Currently the value is the same as IDENTIFIER_NODE, which has such
   a property.  If this compile time assertion ever fails, make sure that
   the new tree code that equals (int) VALUE has the same property.  */
extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1];
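
/* The declaration above is a poor man's static assertion: if the two
   codes ever diverge, the array gets a negative size and compilation
   fails.  An illustrative example of the idiom (not part of this file):

     extern char ok[1 == 1 ? 1 : -1];    // accepted
     extern char bad[1 == 2 ? 1 : -1];   // error: negative array size  */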

/* Type of micro operation.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */
};

static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};

/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  EMIT_NOTE_BEFORE_INSN,
  EMIT_NOTE_AFTER_INSN,
  EMIT_NOTE_AFTER_CALL_INSN
};

/* Structure holding information about micro operation.  */
typedef struct micro_operation_def
{
  /* Type of micro operation.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment.  */
    HOST_WIDE_INT adjust;
  } u;
} micro_operation;
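
/* For example, a simple (set (reg R) (mem M)) whose source and
   destination both track variables is typically recorded as an MO_USE
   for the MEM followed by an MO_SET for the store, both against the
   insn; when debug insns are enabled these become MO_VAL_USE and
   MO_VAL_SET wrapping cselib VALUEs.  (Illustrative summary of the
   scan performed by add_uses_1 and add_stores, declared below.)  */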


/* A declaration of a variable, or an RTL value being handled like a
   declaration.  */
typedef void *decl_or_value;

/* Return true if a decl_or_value DV is a DECL or NULL.  */
static inline bool
dv_is_decl_p (decl_or_value dv)
{
  return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE;
}

/* Return true if a decl_or_value is a VALUE rtl.  */
static inline bool
dv_is_value_p (decl_or_value dv)
{
  return dv && !dv_is_decl_p (dv);
}

/* Return the decl in the decl_or_value.  */
static inline tree
dv_as_decl (decl_or_value dv)
{
  gcc_checking_assert (dv_is_decl_p (dv));
  return (tree) dv;
}

/* Return the value in the decl_or_value.  */
static inline rtx
dv_as_value (decl_or_value dv)
{
  gcc_checking_assert (dv_is_value_p (dv));
  return (rtx) dv;
}

/* Return the opaque pointer in the decl_or_value.  */
static inline void *
dv_as_opaque (decl_or_value dv)
{
  return dv;
}
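
/* Illustrative round trip through the accessors above (SOME_DECL
   stands for an arbitrary VAR_DECL; dv_from_decl and dv_from_value
   are defined later in this file):

     decl_or_value dv = dv_from_decl (SOME_DECL);
     gcc_checking_assert (dv_is_decl_p (dv));
     tree decl = dv_as_decl (dv);	// yields SOME_DECL again
*/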


/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   a chain is the best data structure here.  */
typedef struct attrs_def
{
  /* Pointer to next member of the list.  */
  struct attrs_def *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((attrs_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<attrs_def> pool;
} *attrs;

/* Structure for chaining the locations.  */
typedef struct location_chain_def
{
  /* Next element in the chain.  */
  struct location_chain_def *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialized?  */
  enum var_init_status init;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((location_chain_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<location_chain_def> pool;
} *location_chain;

/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked list of
   backlinks back to DV.  */
typedef struct loc_exp_dep_s
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep_s *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list.  */
  struct loc_exp_dep_s **pprev;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((loc_exp_dep_s *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<loc_exp_dep_s> pool;
} loc_exp_dep;


/* This data structure holds information about the depth of a variable
   expansion.  */
typedef struct expand_depth_struct
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
} expand_depth;

/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when expanding FROM into cur_loc.  */
  vec<loc_exp_dep, va_heap, vl_embed> deps;
};

/* Structure describing one part of a variable.  */
typedef struct variable_part_def
{
  /* Chain of locations of the part.  */
  location_chain loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
} variable_part;

/* Maximum number of location parts.  */
#define MAX_VAR_PARTS 16

/* Enumeration type used to discriminate various types of one-part
   variables.  */
typedef enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
} onepart_enum_t;

/* Structure describing where the variable is located.  */
typedef struct variable_def
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable_def struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  */
  variable_part var_part[1];
} *variable;
typedef const struct variable_def *const_variable;

/* Pointer to the BB's information specific to the variable tracking pass.  */
#define VTI(BB) ((variable_tracking_info) (BB)->aux)

/* Macro to access MEM_OFFSET as a HOST_WIDE_INT.  Evaluates MEM twice.  */
#define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0)
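
/* Because MEM is evaluated twice, callers must not pass an expression
   with side effects; e.g. INT_MEM_OFFSET (*mem_ptr++) would perform
   the increment twice (hypothetical misuse, shown for illustration).  */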

#if ENABLE_CHECKING && (GCC_VERSION >= 2007)

/* Access VAR's Ith part's offset, checking that it's not a one-part
   variable.  */
#define VAR_PART_OFFSET(var, i) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (!__v->onepart);			\
      &__v->var_part[(i)].aux.offset; }))

/* Access VAR's one-part auxiliary data, checking that it is a
   one-part variable.  */
#define VAR_LOC_1PAUX(var) __extension__			\
(*({  variable const __v = (var);				\
      gcc_checking_assert (__v->onepart);			\
      &__v->var_part[0].aux.onepaux; }))

#else
#define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset)
#define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux)
#endif
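
/* With checking enabled, the statement-expression versions above trap
   misuse at runtime: e.g. VAR_PART_OFFSET (v, 0) on a one-part
   variable fails the gcc_checking_assert instead of silently reading
   the wrong union member (illustrative note).  */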

/* These are accessor macros for the one-part auxiliary data.  When
   convenient for users, they're guarded by tests that the data was
   allocated.  */
#define VAR_LOC_DEP_LST(var)	(VAR_LOC_1PAUX (var)		  \
				 ? VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_DEP_LSTP(var)	(VAR_LOC_1PAUX (var)		   \
				 ? &VAR_LOC_1PAUX (var)->backlinks \
				 : NULL)
#define VAR_LOC_FROM(var)	(VAR_LOC_1PAUX (var)->from)
#define VAR_LOC_DEPTH(var)	(VAR_LOC_1PAUX (var)->depth)
#define VAR_LOC_DEP_VEC(var)	(VAR_LOC_1PAUX (var)	\
				 ? &VAR_LOC_1PAUX (var)->deps \
				 : NULL)



typedef unsigned int dvuid;

/* Return the uid of DV.  */

static inline dvuid
dv_uid (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    return CSELIB_VAL_PTR (dv_as_value (dv))->uid;
  else
    return DECL_UID (dv_as_decl (dv));
}

/* Compute the hash from the uid.  */

static inline hashval_t
dv_uid2hash (dvuid uid)
{
  return uid;
}

/* The hash function for a decl_or_value, used by the variable hash
   tables.  */

static inline hashval_t
dv_htab_hash (decl_or_value dv)
{
  return dv_uid2hash (dv_uid (dv));
}

static void variable_htab_free (void *);

/* Variable hashtable helpers.  */

struct variable_hasher : pointer_hash <variable_def>
{
  typedef void *compare_type;
  static inline hashval_t hash (const variable_def *);
  static inline bool equal (const variable_def *, const void *);
  static inline void remove (variable_def *);
};

/* The hash function for variable_htab, computes the hash value
   from the declaration of variable V.  */

inline hashval_t
variable_hasher::hash (const variable_def *v)
{
  return dv_htab_hash (v->dv);
}

/* Compare the declaration of variable V with declaration Y.  */

inline bool
variable_hasher::equal (const variable_def *v, const void *y)
{
  decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y);

  return (dv_as_opaque (v->dv) == dv_as_opaque (dv));
}

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

inline void
variable_hasher::remove (variable_def *var)
{
  variable_htab_free (var);
}

typedef hash_table<variable_hasher> variable_table_type;
typedef variable_table_type::iterator variable_iterator_type;

/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
typedef struct emit_note_data_def
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted (before/after insn)?  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
} emit_note_data;

/* Structure holding a refcounted hash table.  If refcount > 1,
   it must first be unshared before being modified.  */
typedef struct shared_hash_def
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((shared_hash_def *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<shared_hash_def> pool;
} *shared_hash;

/* Structure holding the IN or OUT set for a basic block.  */
typedef struct dataflow_set_def
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash vars;

  /* Vars that are being traversed.  */
  shared_hash traversed_vars;
} dataflow_set;

/* The structure (one for each basic block) containing the information
   needed for variable tracking.  */
typedef struct variable_tracking_info_def
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

} *variable_tracking_info;

/* Alloc pool for struct attrs_def.  */
pool_allocator<attrs_def> attrs_def::pool ("attrs_def pool", 1024);

/* Alloc pool for struct variable_def with MAX_VAR_PARTS entries.  */

static pool_allocator<variable_def> var_pool
  ("variable_def pool", 64,
   (MAX_VAR_PARTS - 1) * sizeof (((variable)NULL)->var_part[0]));
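
/* The extra size passed to the pool above makes room for the trailing
   var_part entries beyond the single element declared in variable_def;
   e.g. with MAX_VAR_PARTS == 16, each allocation carries space for 15
   additional variable_part slots (explanatory note).  */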

/* Alloc pool for struct variable_def with a single var_part entry.  */
static pool_allocator<variable_def> valvar_pool
  ("small variable_def pool", 256);

/* Alloc pool for struct location_chain_def.  */
pool_allocator<location_chain_def> location_chain_def::pool
  ("location_chain_def pool", 1024);

/* Alloc pool for struct shared_hash_def.  */
pool_allocator<shared_hash_def> shared_hash_def::pool
  ("shared_hash_def pool", 256);

/* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables.  */
pool_allocator<loc_exp_dep> loc_exp_dep::pool ("loc_exp_dep pool", 64);

/* Changed variables; notes will be emitted for them.  */
static variable_table_type *changed_variables;

/* Shall notes be emitted?  */
static bool emit_notes;

/* Values whose dynamic location lists have gone empty, but whose
   cselib location lists are still usable.  Use this to hold the
   current location, the backlinks, etc, during emit_notes.  */
static variable_table_type *dropped_values;

/* Empty shared hashtable.  */
static shared_hash empty_shared_hash;

/* Scratch register bitmap used by cselib_expand_value_rtx.  */
static bitmap scratch_regs = NULL;

#ifdef HAVE_window_save
typedef struct GTY(()) parm_reg {
  rtx outgoing;
  rtx incoming;
} parm_reg_t;


/* Vector of windowed parameter registers, if any.  */
static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif

/* Variable used to tell whether cselib_process_insn called our hook.  */
static bool cselib_hook_called;

/* Local function prototypes.  */
static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *,
					  HOST_WIDE_INT *);
static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *,
					       HOST_WIDE_INT *);
static bool vt_stack_adjustments (void);

static void init_attrs_list_set (attrs *);
static void attrs_list_clear (attrs *);
static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT);
static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx);
static void attrs_list_copy (attrs *, attrs);
static void attrs_list_union (attrs *, attrs);

static variable_def **unshare_variable (dataflow_set *set, variable_def **slot,
					variable var, enum var_init_status);
static void vars_copy (variable_table_type *, variable_table_type *);
static tree var_debug_decl (tree);
static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_reg_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_reg_delete (dataflow_set *, rtx, bool);
static void var_regno_delete (dataflow_set *, int);
static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx);
static void var_mem_delete_and_set (dataflow_set *, rtx, bool,
				    enum var_init_status, rtx);
static void var_mem_delete (dataflow_set *, rtx, bool);

static void dataflow_set_init (dataflow_set *);
static void dataflow_set_clear (dataflow_set *);
static void dataflow_set_copy (dataflow_set *, dataflow_set *);
static int variable_union_info_cmp_pos (const void *, const void *);
static void dataflow_set_union (dataflow_set *, dataflow_set *);
static location_chain find_loc_in_1pdv (rtx, variable, variable_table_type *);
static bool canon_value_cmp (rtx, rtx);
static int loc_cmp (rtx, rtx);
static bool variable_part_different_p (variable_part *, variable_part *);
static bool onepart_variable_different_p (variable, variable);
static bool variable_different_p (variable, variable);
static bool dataflow_set_different (dataflow_set *, dataflow_set *);
static void dataflow_set_destroy (dataflow_set *);

static bool contains_symbol_ref (rtx);
static bool track_expr_p (tree, bool);
static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT);
static void add_uses_1 (rtx *, void *);
static void add_stores (rtx, const_rtx, void *);
static bool compute_bb_dataflow (basic_block);
static bool vt_find_locations (void);

static void dump_attrs_list (attrs);
static void dump_var (variable);
static void dump_vars (variable_table_type *);
static void dump_dataflow_set (dataflow_set *);
static void dump_dataflow_sets (void);

static void set_dv_changed (decl_or_value, bool);
static void variable_was_changed (variable, dataflow_set *);
static variable_def **set_slot_part (dataflow_set *, rtx, variable_def **,
				     decl_or_value, HOST_WIDE_INT,
				     enum var_init_status, rtx);
static void set_variable_part (dataflow_set *, rtx,
			       decl_or_value, HOST_WIDE_INT,
			       enum var_init_status, rtx, enum insert_option);
static variable_def **clobber_slot_part (dataflow_set *, rtx,
					 variable_def **, HOST_WIDE_INT, rtx);
static void clobber_variable_part (dataflow_set *, rtx,
				   decl_or_value, HOST_WIDE_INT, rtx);
static variable_def **delete_slot_part (dataflow_set *, rtx, variable_def **,
					HOST_WIDE_INT);
static void delete_variable_part (dataflow_set *, rtx,
				  decl_or_value, HOST_WIDE_INT);
static void emit_notes_in_bb (basic_block, dataflow_set *);
static void vt_emit_notes (void);

static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *);
static void vt_add_function_parameters (void);
static bool vt_initialize (void);
static void vt_finalize (void);

/* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec.  */

static int
stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff,
				 void *arg)
{
  if (dest != stack_pointer_rtx)
    return 0;

  switch (GET_CODE (op))
    {
    case PRE_INC:
    case PRE_DEC:
      ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff);
      return 0;
    case POST_INC:
    case POST_DEC:
      ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff);
      return 0;
    case PRE_MODIFY:
    case POST_MODIFY:
      /* We handle only adjustments by constant amount.  */
      gcc_assert (GET_CODE (src) == PLUS
		  && CONST_INT_P (XEXP (src, 1))
		  && XEXP (src, 0) == stack_pointer_rtx);
      ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY]
	-= INTVAL (XEXP (src, 1));
      return 0;
    default:
      gcc_unreachable ();
    }
}

/* Given a SET pattern, calculate the amounts of stack adjustment by
   which it PRE- and POST-modifies the stack pointer.
   This function is similar to stack_adjust_offset.  */

static void
stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
			      HOST_WIDE_INT *post)
{
  rtx src = SET_SRC (pattern);
  rtx dest = SET_DEST (pattern);
  enum rtx_code code;

  if (dest == stack_pointer_rtx)
    {
      /* (set (reg sp) (plus (reg sp) (const_int)))  */
      code = GET_CODE (src);
      if (! (code == PLUS || code == MINUS)
	  || XEXP (src, 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (src, 1)))
	return;

      if (code == MINUS)
	*post += INTVAL (XEXP (src, 1));
      else
	*post -= INTVAL (XEXP (src, 1));
      return;
    }
  HOST_WIDE_INT res[2] = { 0, 0 };
  for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res);
  *pre += res[0];
  *post += res[1];
}
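
/* For example, on a downward-growing stack,
   (set (reg sp) (plus (reg sp) (const_int -16))) allocates 16 bytes
   and is accumulated here as *post += 16, while
   (set (reg sp) (plus (reg sp) (const_int 16))) deallocates them and
   yields *post -= 16 (illustrative reading of the code above).  */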

/* Given an INSN, calculate the amounts of stack adjustment by which it
   PRE- and POST-modifies the stack pointer.  */

static void
insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre,
				   HOST_WIDE_INT *post)
{
  rtx pattern;

  *pre = 0;
  *post = 0;

  pattern = PATTERN (insn);
  if (RTX_FRAME_RELATED_P (insn))
    {
      rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
      if (expr)
	pattern = XEXP (expr, 0);
    }

  if (GET_CODE (pattern) == SET)
    stack_adjust_offset_pre_post (pattern, pre, post);
  else if (GET_CODE (pattern) == PARALLEL
	   || GET_CODE (pattern) == SEQUENCE)
    {
      int i;

      /* There may be stack adjustments inside compound insns.  Search
	 for them.  */
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	if (GET_CODE (XVECEXP (pattern, 0, i)) == SET)
	  stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post);
    }
}

/* Compute stack adjustments for all blocks by traversing the DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, &pre, &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      free (stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (ei))
	    /* Go to the next edge.  */
	    ei_next (&stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (stack);
  return true;
}

/* cfa_base_rtx is arg_pointer_rtx or frame_pointer_rtx when
   stack_pointer_rtx or hard_frame_pointer_rtx (respectively) is being
   mapped to it; cfa_base_offset is the offset to apply in that
   mapping.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;

/* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx
   or hard_frame_pointer_rtx.  */

static inline rtx
compute_cfa_pointer (HOST_WIDE_INT adjustment)
{
  return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset);
}

/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done.  */
static HOST_WIDE_INT hard_frame_pointer_adjustment = -1;

/* Data for adjust_mems callback.  */

struct adjust_mem_data
{
  bool store;
  machine_mode mem_mode;
  HOST_WIDE_INT stack_adjust;
  rtx_expr_list *side_effects;
};

/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetic to a narrower mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    break;
	  case ASHIFT:
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}

/* Transform X into narrower mode MODE from wider mode WMODE.  */

static rtx
use_narrower_mode (rtx x, machine_mode mode, machine_mode wmode)
{
  rtx op0, op1;
  if (CONSTANT_P (x))
    return lowpart_subreg (mode, x, wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (mode, x, wmode);
    case PLUS:
    case MINUS:
    case MULT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (mode, op1, wmode);
      else if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (GET_MODE (op1)))
	op1 = lowpart_subreg (mode, op1, GET_MODE (op1));
      return simplify_gen_binary (ASHIFT, mode, op0, op1);
    default:
      gcc_unreachable ();
    }
}
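
/* For instance, narrowing (plus:DI (reg:DI r) (const_int 8)) to SImode
   conceptually yields (plus:SI (subreg:SI (reg:DI r) 0) (const_int 8))
   on a little-endian target (illustrative; the exact rtl depends on
   what simplify_gen_binary and lowpart_subreg produce).  */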

/* Helper function for adjusting used MEMs.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  struct adjust_mem_data *amd = (struct adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && hard_frame_pointer_adjustment != -1
	       && cfa_base_rtx)
	return compute_cfa_pointer (hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			   gen_int_mode (GET_CODE (loc) == PRE_INC
					 ? GET_MODE_SIZE (amd->mem_mode)
					 : -GET_MODE_SIZE (amd->mem_mode),
					 GET_MODE (loc)));
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0),
			  gen_int_mode ((GET_CODE (loc) == PRE_INC
					 || GET_CODE (loc) == POST_INC)
					? GET_MODE_SIZE (amd->mem_mode)
					: -GET_MODE_SIZE (amd->mem_mode),
					GET_MODE (loc)));
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case PRE_MODIFY:
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     adjust_mems, data);
      amd->store = store_save;
      amd->side_effects = alloc_EXPR_LIST (0,
					   gen_rtx_SET (XEXP (loc, 0), tem),
					   amd->side_effects);
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      if (MAY_HAVE_DEBUG_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && (GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (tem)) == MODE_PARTIAL_INT)
	  && (GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT
	      || GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_PARTIAL_INT)
	  && GET_MODE_PRECISION (GET_MODE (tem))
	     < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (tem)))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), tem))
	return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem),
				  GET_MODE (SUBREG_REG (tem)));
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}

/* Helper function for replacement of uses.  */

static void
adjust_mem_uses (rtx *x, void *data)
{
  rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, adjust_mems, data);
  if (new_x != *x)
    validate_change (NULL_RTX, x, new_x, true);
}

/* Helper function for replacement of stores.  */

static void
adjust_mem_stores (rtx loc, const_rtx expr, void *data)
{
  if (MEM_P (loc))
    {
      rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX,
					      adjust_mems, data);
      if (new_dest != SET_DEST (expr))
	{
	  rtx xexpr = CONST_CAST_RTX (expr);
	  validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true);
	}
    }
}

/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  struct adjust_mem_data amd;
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg_t *p;

      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;
  amd.side_effects = NULL;

  amd.store = true;
  note_stores (PATTERN (insn), adjust_mem_stores, &amd);

  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is a tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, the asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (amd.side_effects)
    {
      rtx *pat, new_pat, s;
      int i, oldn, newn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      for (s = amd.side_effects, newn = 0; s; newn++)
	s = XEXP (s, 1);
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;
      for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1))
	XVECEXP (new_pat, 0, i) = XEXP (s, 0);
      free_EXPR_LIST_list (&amd.side_effects);
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}

/* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV.  */
static inline rtx
dv_as_rtx (decl_or_value dv)
{
  tree decl;

  if (dv_is_value_p (dv))
    return dv_as_value (dv);

  decl = dv_as_decl (dv);

  gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL);
  return DECL_RTL_KNOWN_SET (decl);
}

/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum_t
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  if (!MAY_HAVE_DEBUG_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}

/* Return the variable pool to be used for a dv of type ONEPART.  */
static inline pool_allocator <variable_def> &
onepart_pool (onepart_enum_t onepart)
{
  return onepart ? valvar_pool : var_pool;
}

/* Build a decl_or_value out of a decl.  */
static inline decl_or_value
dv_from_decl (tree decl)
{
  decl_or_value dv;
  dv = decl;
  gcc_checking_assert (dv_is_decl_p (dv));
  return dv;
}

/* Build a decl_or_value out of a value.  */
static inline decl_or_value
dv_from_value (rtx value)
{
  decl_or_value dv;
  dv = value;
  gcc_checking_assert (dv_is_value_p (dv));
  return dv;
}

/* Return a value or the decl of a debug_expr as a decl_or_value.  */
static inline decl_or_value
dv_from_rtx (rtx x)
{
  decl_or_value dv;

  switch (GET_CODE (x))
    {
    case DEBUG_EXPR:
      dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x));
      gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x);
      break;

    case VALUE:
      dv = dv_from_value (x);
      break;

    default:
      gcc_unreachable ();
    }

  return dv;
}

extern void debug_dv (decl_or_value dv);

DEBUG_FUNCTION void
debug_dv (decl_or_value dv)
{
  if (dv_is_value_p (dv))
    debug_rtx (dv_as_value (dv));
  else
    debug_generic_stmt (dv_as_decl (dv));
}

static void loc_exp_dep_clear (variable var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable var = (variable) elem;
  location_chain node, next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (var->onepart).remove (var);
}

/* Initialize the set (array) SET of attrs to empty lists.  */

static void
init_attrs_list_set (attrs *set)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    set[i] = NULL;
}

/* Make the list *LISTP empty.  */

static void
attrs_list_clear (attrs *listp)
{
  attrs list, next;

  for (list = *listp; list; list = next)
    {
      next = list->next;
      delete list;
    }
  *listp = NULL;
}

/* Return the entry for the pair of DV and OFFSET in LIST, or NULL if
   there is none.  */

static attrs
attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset)
{
  for (; list; list = list->next)
    if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset)
      return list;
  return NULL;
}

/* Insert the triplet DV, OFFSET, LOC into the list *LISTP.  */

static void
attrs_list_insert (attrs *listp, decl_or_value dv,
		   HOST_WIDE_INT offset, rtx loc)
{
  attrs list = new attrs_def;
  list->loc = loc;
  list->dv = dv;
  list->offset = offset;
  list->next = *listp;
  *listp = list;
}

/* Copy all nodes from SRC and create a list *DSTP of the copies.  */

static void
attrs_list_copy (attrs *dstp, attrs src)
{
  attrs_list_clear (dstp);
  for (; src; src = src->next)
    {
      attrs n = new attrs_def;
      n->loc = src->loc;
      n->dv = src->dv;
      n->offset = src->offset;
      n->next = *dstp;
      *dstp = n;
    }
}

/* Add all nodes from SRC which are not in *DSTP to *DSTP.  */

static void
attrs_list_union (attrs *dstp, attrs src)
{
  for (; src; src = src->next)
    {
      if (!attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Combine nodes that are not onepart nodes from SRC and SRC2 into
   *DSTP.  */

static void
attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2)
{
  gcc_assert (!*dstp);
  for (; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
  for (src = src2; src; src = src->next)
    {
      if (!dv_onepart_p (src->dv)
	  && !attrs_list_member (*dstp, src->dv, src->offset))
	attrs_list_insert (dstp, src->dv, src->offset, src->loc);
    }
}

/* Shared hashtable support.  */

/* Return true if VARS is shared.  */

static inline bool
shared_hash_shared (shared_hash vars)
{
  return vars->refcount > 1;
}

/* Return the hash table for VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash vars)
{
  return vars->htab;
}

/* Return true if VAR is shared, possibly because VARS is shared.  */

static inline bool
shared_var_p (variable var, shared_hash vars)
{
  /* Don't count an entry in the changed_variables table as a duplicate.  */
  return ((var->refcount > 1 + (int) var->in_changed_variables)
	  || shared_hash_shared (vars));
}

/* Copy variables into a new hash table.  */

static shared_hash
shared_hash_unshare (shared_hash vars)
{
  shared_hash new_vars = new shared_hash_def;
  gcc_assert (vars->refcount > 1);
  new_vars->refcount = 1;
  new_vars->htab = new variable_table_type (vars->htab->elements () + 3);
  vars_copy (new_vars->htab, vars->htab);
  vars->refcount--;
  return new_vars;
}

/* Increment reference counter on VARS and return it.  */

static inline shared_hash
shared_hash_copy (shared_hash vars)
{
  vars->refcount++;
  return vars;
}

/* Decrement reference counter and destroy hash table if not shared
   anymore.  */

static void
shared_hash_destroy (shared_hash vars)
{
  gcc_checking_assert (vars->refcount > 0);
  if (--vars->refcount == 0)
    {
      delete vars->htab;
      delete vars;
    }
}
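
/* Typical lifetime, for illustration: a dataflow set that copies VARS
   via shared_hash_copy bumps the refcount to 2; the first writer then
   goes through shared_hash_find_slot_unshare below, which clones the
   table; each owner eventually calls shared_hash_destroy.  */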

/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  */

static inline variable_def **
shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (*pvars))
    *pvars = shared_hash_unshare (*pvars);
  return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins);
}

static inline variable_def **
shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins);
}

/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared; otherwise
   return NULL.  */

static inline variable_def **
shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash,
						       shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

static inline variable_def **
shared_hash_find_slot (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv));
}

/* Return slot for DV only if it is already present in the hash table.  */

static inline variable_def **
shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT);
}

static inline variable_def **
shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv));
}

/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable
shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (dv, dvhash);
}

static inline variable
shared_hash_find (shared_hash vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dv_htab_hash (dv));
}

/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.  */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
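
/* For example, among VALUEs with uids 7, 3 and 9, the VALUE with uid 3
   is canonical: canon_value_cmp returns true for it against either of
   the others, so both end up one link away from it in the star
   (illustrative).  */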

static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it into dataflow set SET.  */
1772
1773 static variable_def **
1774 unshare_variable (dataflow_set *set, variable_def **slot, variable var,
1775 enum var_init_status initialized)
1776 {
1777 variable new_var;
1778 int i;
1779
1780 new_var = onepart_pool (var->onepart).allocate ();
1781 new_var->dv = var->dv;
1782 new_var->refcount = 1;
1783 var->refcount--;
1784 new_var->n_var_parts = var->n_var_parts;
1785 new_var->onepart = var->onepart;
1786 new_var->in_changed_variables = false;
1787
1788 if (! flag_var_tracking_uninit)
1789 initialized = VAR_INIT_STATUS_INITIALIZED;
1790
1791 for (i = 0; i < var->n_var_parts; i++)
1792 {
1793 location_chain node;
1794 location_chain *nextp;
1795
1796 if (i == 0 && var->onepart)
1797 {
1798 /* One-part auxiliary data is only used while emitting
1799 notes, so propagate it to the new variable in the active
1800 dataflow set. If we're not emitting notes, this will be
1801 a no-op. */
1802 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
1803 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
1804 VAR_LOC_1PAUX (var) = NULL;
1805 }
1806 else
1807 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
1808 nextp = &new_var->var_part[i].loc_chain;
1809 for (node = var->var_part[i].loc_chain; node; node = node->next)
1810 {
1811 location_chain new_lc;
1812
1813 new_lc = new location_chain_def;
1814 new_lc->next = NULL;
1815 if (node->init > initialized)
1816 new_lc->init = node->init;
1817 else
1818 new_lc->init = initialized;
1819 if (node->set_src && !(MEM_P (node->set_src)))
1820 new_lc->set_src = node->set_src;
1821 else
1822 new_lc->set_src = NULL;
1823 new_lc->loc = node->loc;
1824
1825 *nextp = new_lc;
1826 nextp = &new_lc->next;
1827 }
1828
1829 new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
1830 }
1831
1832 dst_can_be_shared = false;
1833 if (shared_hash_shared (set->vars))
1834 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, NO_INSERT);
1835 else if (set->traversed_vars && set->vars != set->traversed_vars)
1836 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
1837 *slot = new_var;
1838 if (var->in_changed_variables)
1839 {
1840 variable_def **cslot
1841 = changed_variables->find_slot_with_hash (var->dv,
1842 dv_htab_hash (var->dv),
1843 NO_INSERT);
1844 gcc_assert (*cslot == (void *) var);
1845 var->in_changed_variables = false;
1846 variable_htab_free (var);
1847 *cslot = new_var;
1848 new_var->in_changed_variables = true;
1849 }
1850 return slot;
1851 }
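
/* The copy-on-write step above in miniature; an illustrative sketch
   with hypothetical toy_* names, not part of the pass.  A node shared
   with other dataflow sets is deep-copied before mutation, so the
   other sets keep the old contents at the cost of one refcount. */
#if 0
#include <stdlib.h>

struct toy_var { int refcount; int payload; };

static struct toy_var *
toy_unshare_variable (struct toy_var *var)
{
  struct toy_var *copy;
  if (var->refcount == 1)
    return var;			/* Already private: mutate in place. */
  copy = (struct toy_var *) malloc (sizeof *copy);
  copy->refcount = 1;
  copy->payload = var->payload;	/* Deep copy of the chains above. */
  var->refcount--;		/* Original remains valid elsewhere. */
  return copy;
}
#endif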
1852
1853 /* Copy all variables from hash table SRC to hash table DST. */
1854
1855 static void
1856 vars_copy (variable_table_type *dst, variable_table_type *src)
1857 {
1858 variable_iterator_type hi;
1859 variable var;
1860
1861 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi)
1862 {
1863 variable_def **dstp;
1864 var->refcount++;
1865 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv),
1866 INSERT);
1867 *dstp = var;
1868 }
1869 }
1870
1871 /* Map a decl to its main debug decl. */
1872
1873 static inline tree
1874 var_debug_decl (tree decl)
1875 {
1876 if (decl && TREE_CODE (decl) == VAR_DECL
1877 && DECL_HAS_DEBUG_EXPR_P (decl))
1878 {
1879 tree debugdecl = DECL_DEBUG_EXPR (decl);
1880 if (DECL_P (debugdecl))
1881 decl = debugdecl;
1882 }
1883
1884 return decl;
1885 }
1886
1887 /* Set the register LOC to contain DV, OFFSET. */
1888
1889 static void
1890 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1891 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
1892 enum insert_option iopt)
1893 {
1894 attrs node;
1895 bool decl_p = dv_is_decl_p (dv);
1896
1897 if (decl_p)
1898 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
1899
1900 for (node = set->regs[REGNO (loc)]; node; node = node->next)
1901 if (dv_as_opaque (node->dv) == dv_as_opaque (dv)
1902 && node->offset == offset)
1903 break;
1904 if (!node)
1905 attrs_list_insert (&set->regs[REGNO (loc)], dv, offset, loc);
1906 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
1907 }
1908
1909 /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */
1910
1911 static void
1912 var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
1913 rtx set_src)
1914 {
1915 tree decl = REG_EXPR (loc);
1916 HOST_WIDE_INT offset = REG_OFFSET (loc);
1917
1918 var_reg_decl_set (set, loc, initialized,
1919 dv_from_decl (decl), offset, set_src, INSERT);
1920 }
1921
1922 static enum var_init_status
1923 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv)
1924 {
1925 variable var;
1926 int i;
1927 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN;
1928
1929 if (! flag_var_tracking_uninit)
1930 return VAR_INIT_STATUS_INITIALIZED;
1931
1932 var = shared_hash_find (set->vars, dv);
1933 if (var)
1934 {
1935 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++)
1936 {
1937 location_chain nextp;
1938 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next)
1939 if (rtx_equal_p (nextp->loc, loc))
1940 {
1941 ret_val = nextp->init;
1942 break;
1943 }
1944 }
1945 }
1946
1947 return ret_val;
1948 }
1949
1950 /* Delete current content of register LOC in dataflow set SET and set
1951 the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If
1952 MODIFY is true, any other live copies of the same variable part are
1953 also deleted from the dataflow set, otherwise the variable part is
1954 assumed to be copied from another location holding the same
1955 part. */
1956
1957 static void
1958 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
1959 enum var_init_status initialized, rtx set_src)
1960 {
1961 tree decl = REG_EXPR (loc);
1962 HOST_WIDE_INT offset = REG_OFFSET (loc);
1963 attrs node, next;
1964 attrs *nextp;
1965
1966 decl = var_debug_decl (decl);
1967
1968 if (initialized == VAR_INIT_STATUS_UNKNOWN)
1969 initialized = get_init_value (set, loc, dv_from_decl (decl));
1970
1971 nextp = &set->regs[REGNO (loc)];
1972 for (node = *nextp; node; node = next)
1973 {
1974 next = node->next;
1975 if (dv_as_opaque (node->dv) != decl || node->offset != offset)
1976 {
1977 delete_variable_part (set, node->loc, node->dv, node->offset);
1978 delete node;
1979 *nextp = next;
1980 }
1981 else
1982 {
1983 node->loc = loc;
1984 nextp = &node->next;
1985 }
1986 }
1987 if (modify)
1988 clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
1989 var_reg_set (set, loc, initialized, set_src);
1990 }
1991
1992 /* Delete the association of register LOC in dataflow set SET with any
1993 variables that aren't onepart. If CLOBBER is true, also delete any
1994 other live copies of the same variable part, and delete the
1995 association with onepart dvs too. */
1996
1997 static void
1998 var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
1999 {
2000 attrs *nextp = &set->regs[REGNO (loc)];
2001 attrs node, next;
2002
2003 if (clobber)
2004 {
2005 tree decl = REG_EXPR (loc);
2006 HOST_WIDE_INT offset = REG_OFFSET (loc);
2007
2008 decl = var_debug_decl (decl);
2009
2010 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2011 }
2012
2013 for (node = *nextp; node; node = next)
2014 {
2015 next = node->next;
2016 if (clobber || !dv_onepart_p (node->dv))
2017 {
2018 delete_variable_part (set, node->loc, node->dv, node->offset);
2019 delete node;
2020 *nextp = next;
2021 }
2022 else
2023 nextp = &node->next;
2024 }
2025 }
2026
2027 /* Delete content of register with number REGNO in dataflow set SET. */
2028
2029 static void
2030 var_regno_delete (dataflow_set *set, int regno)
2031 {
2032 attrs *reg = &set->regs[regno];
2033 attrs node, next;
2034
2035 for (node = *reg; node; node = next)
2036 {
2037 next = node->next;
2038 delete_variable_part (set, node->loc, node->dv, node->offset);
2039 delete node;
2040 }
2041 *reg = NULL;
2042 }
2043
2044 /* Return true if I is the negated value of a power of two. */
2045 static bool
2046 negative_power_of_two_p (HOST_WIDE_INT i)
2047 {
2048 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i;
2049 return x == (x & -x);
2050 }
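
/* A few spot checks, not part of the pass, of the bit trick above:
   for nonzero X, X & -X isolates the lowest set bit, so the equality
   holds exactly when X has a single bit set; negating first makes the
   test accept alignment masks such as -8 (...111000). */
#if 0
#include <assert.h>

static void
toy_check_negative_power_of_two_p (void)
{
  assert (negative_power_of_two_p (-8));	/* -(2^3): a mask. */
  assert (negative_power_of_two_p (-1));	/* -(2^0). */
  assert (!negative_power_of_two_p (-12));	/* 12 has two bits set. */
}
#endif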
2051
2052 /* Strip constant offsets and alignments off of LOC. Return the base
2053 expression. */
2054
2055 static rtx
2056 vt_get_canonicalize_base (rtx loc)
2057 {
2058 while ((GET_CODE (loc) == PLUS
2059 || GET_CODE (loc) == AND)
2060 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2061 && (GET_CODE (loc) != AND
2062 || negative_power_of_two_p (INTVAL (XEXP (loc, 1)))))
2063 loc = XEXP (loc, 0);
2064
2065 return loc;
2066 }
2067
2068 /* This caches canonicalized addresses for VALUEs, computed using
2069 information in the global cselib table. */
2070 static hash_map<rtx, rtx> *global_get_addr_cache;
2071
2072 /* This caches canonicalized addresses for VALUEs, computed using
2073 information from the global cache and information pertaining to a
2074 basic block being analyzed. */
2075 static hash_map<rtx, rtx> *local_get_addr_cache;
2076
2077 static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2078
2079 /* Return the canonical address for LOC, which must be a VALUE, using a
2080 cached global equivalence or computing it and storing it in the
2081 global cache. */
2082
2083 static rtx
2084 get_addr_from_global_cache (rtx const loc)
2085 {
2086 rtx x;
2087
2088 gcc_checking_assert (GET_CODE (loc) == VALUE);
2089
2090 bool existed;
2091 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed);
2092 if (existed)
2093 return *slot;
2094
2095 x = canon_rtx (get_addr (loc));
2096
2097 /* Tentative, avoiding infinite recursion. */
2098 *slot = x;
2099
2100 if (x != loc)
2101 {
2102 rtx nx = vt_canonicalize_addr (NULL, x);
2103 if (nx != x)
2104 {
2105 	  /* The table may have moved during recursion; recompute
2106 	     SLOT. */
2107 *global_get_addr_cache->get (loc) = x = nx;
2108 }
2109 }
2110
2111 return x;
2112 }
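
/* An illustrative sketch, not part of the pass, of the "tentative"
   store above: seeding the cache slot before recursing is what keeps
   a cyclic chain of equivalences from recursing forever, since the
   inner lookup terminates at the seed.  The toy_* names and
   toy_resolve are hypothetical; keys are assumed in 1..255 so that 0
   can mean "not computed yet". */
#if 0
extern int toy_resolve (int key);	/* May call back into toy_canon. */

static int toy_cache[256];		/* 0 means "not computed yet". */

static int
toy_canon (int key)
{
  int better;
  if (toy_cache[key])
    return toy_cache[key];
  toy_cache[key] = key;		/* Tentative, avoiding infinite recursion. */
  better = toy_resolve (key);
  if (better != key)
    toy_cache[key] = better;	/* Improve the entry if possible. */
  return toy_cache[key];
}
#endif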
2113
2114 /* Return the canonical address for LOC, which must be a VALUE, using a
2115 cached local equivalence or computing it and storing it in the
2116 local cache. */
2117
2118 static rtx
2119 get_addr_from_local_cache (dataflow_set *set, rtx const loc)
2120 {
2121 rtx x;
2122 decl_or_value dv;
2123 variable var;
2124 location_chain l;
2125
2126 gcc_checking_assert (GET_CODE (loc) == VALUE);
2127
2128 bool existed;
2129 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed);
2130 if (existed)
2131 return *slot;
2132
2133 x = get_addr_from_global_cache (loc);
2134
2135 /* Tentative, avoiding infinite recursion. */
2136 *slot = x;
2137
2138   /* Recurse to cache the local expansion of X, or in case we need
2139      to search for a VALUE in the expansion. */
2140 if (x != loc)
2141 {
2142 rtx nx = vt_canonicalize_addr (set, x);
2143 if (nx != x)
2144 {
2145 slot = local_get_addr_cache->get (loc);
2146 *slot = x = nx;
2147 }
2148 return x;
2149 }
2150
2151 dv = dv_from_rtx (x);
2152 var = shared_hash_find (set->vars, dv);
2153 if (!var)
2154 return x;
2155
2156 /* Look for an improved equivalent expression. */
2157 for (l = var->var_part[0].loc_chain; l; l = l->next)
2158 {
2159 rtx base = vt_get_canonicalize_base (l->loc);
2160 if (GET_CODE (base) == VALUE
2161 && canon_value_cmp (base, loc))
2162 {
2163 rtx nx = vt_canonicalize_addr (set, l->loc);
2164 if (x != nx)
2165 {
2166 slot = local_get_addr_cache->get (loc);
2167 *slot = x = nx;
2168 }
2169 break;
2170 }
2171 }
2172
2173 return x;
2174 }
2175
2176 /* Canonicalize LOC using equivalences from SET in addition to those
2177 in the cselib static table. It expects a VALUE-based expression,
2178 and it will only substitute VALUEs with other VALUEs or
2179 function-global equivalences, so that, if two addresses have base
2180 VALUEs that are locally or globally related in ways that
2181 memrefs_conflict_p cares about, they will both canonicalize to
2182 expressions that have the same base VALUE.
2183
2184 The use of VALUEs as canonical base addresses enables the canonical
2185 RTXs to remain unchanged globally, if they resolve to a constant,
2186 or throughout a basic block otherwise, so that they can be cached
2187    and the cache need not be invalidated when REGs, MEMs or such
2188 change. */
2189
2190 static rtx
2191 vt_canonicalize_addr (dataflow_set *set, rtx oloc)
2192 {
2193 HOST_WIDE_INT ofst = 0;
2194 machine_mode mode = GET_MODE (oloc);
2195 rtx loc = oloc;
2196 rtx x;
2197 bool retry = true;
2198
2199 while (retry)
2200 {
2201 while (GET_CODE (loc) == PLUS
2202 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2203 {
2204 ofst += INTVAL (XEXP (loc, 1));
2205 loc = XEXP (loc, 0);
2206 }
2207
2208 /* Alignment operations can't normally be combined, so just
2209 canonicalize the base and we're done. We'll normally have
2210 only one stack alignment anyway. */
2211 if (GET_CODE (loc) == AND
2212 && GET_CODE (XEXP (loc, 1)) == CONST_INT
2213 && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
2214 {
2215 x = vt_canonicalize_addr (set, XEXP (loc, 0));
2216 if (x != XEXP (loc, 0))
2217 loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
2218 retry = false;
2219 }
2220
2221 if (GET_CODE (loc) == VALUE)
2222 {
2223 if (set)
2224 loc = get_addr_from_local_cache (set, loc);
2225 else
2226 loc = get_addr_from_global_cache (loc);
2227
2228 /* Consolidate plus_constants. */
2229 while (ofst && GET_CODE (loc) == PLUS
2230 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
2231 {
2232 ofst += INTVAL (XEXP (loc, 1));
2233 loc = XEXP (loc, 0);
2234 }
2235
2236 retry = false;
2237 }
2238 else
2239 {
2240 x = canon_rtx (loc);
2241 if (retry)
2242 retry = (x != loc);
2243 loc = x;
2244 }
2245 }
2246
2247 /* Add OFST back in. */
2248 if (ofst)
2249 {
2250 /* Don't build new RTL if we can help it. */
2251 if (GET_CODE (oloc) == PLUS
2252 && XEXP (oloc, 0) == loc
2253 && INTVAL (XEXP (oloc, 1)) == ofst)
2254 return oloc;
2255
2256 loc = plus_constant (mode, loc, ofst);
2257 }
2258
2259 return loc;
2260 }
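
/* A hypothetical miniature, not from the sources, of the offset
   folding performed above: constants hanging off the address are
   summed into OFST while walking to the base, and added back as a
   single term at the end, so (base + 4) + 8 and base + 12 both
   canonicalize to the same expression.  The toy_* names are made up. */
#if 0
struct toy_addr { struct toy_addr *base; long ofst; };

/* Return the innermost base of LOC, accumulating the constant
   displacements into *OFST_OUT, as the PLUS/CONST_INT loop does. */
static struct toy_addr *
toy_canon_addr (struct toy_addr *loc, long *ofst_out)
{
  long ofst = 0;
  while (loc->base)
    {
      ofst += loc->ofst;
      loc = loc->base;
    }
  *ofst_out = ofst;		/* The caller re-adds it, as above. */
  return loc;
}
#endif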
2261
2262 /* Return true iff there's a true dependence between MLOC and LOC.
2263 MADDR must be a canonicalized version of MLOC's address. */
2264
2265 static inline bool
2266 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc)
2267 {
2268 if (GET_CODE (loc) != MEM)
2269 return false;
2270
2271 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2272 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr))
2273 return false;
2274
2275 return true;
2276 }
2277
2278 /* Hold parameters for the hashtab traversal function
2279 drop_overlapping_mem_locs, see below. */
2280
2281 struct overlapping_mems
2282 {
2283 dataflow_set *set;
2284 rtx loc, addr;
2285 };
2286
2287 /* Remove all MEMs that overlap with COMS->LOC from the location list
2288 of a hash table entry for a value. COMS->ADDR must be a
2289 canonicalized form of COMS->LOC's address, and COMS->LOC must be
2290 canonicalized itself. */
2291
2292 int
2293 drop_overlapping_mem_locs (variable_def **slot, overlapping_mems *coms)
2294 {
2295 dataflow_set *set = coms->set;
2296 rtx mloc = coms->loc, addr = coms->addr;
2297 variable var = *slot;
2298
2299 if (var->onepart == ONEPART_VALUE)
2300 {
2301 location_chain loc, *locp;
2302 bool changed = false;
2303 rtx cur_loc;
2304
2305 gcc_assert (var->n_var_parts == 1);
2306
2307 if (shared_var_p (var, set->vars))
2308 {
2309 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
2310 if (vt_canon_true_dep (set, mloc, addr, loc->loc))
2311 break;
2312
2313 if (!loc)
2314 return 1;
2315
2316 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
2317 var = *slot;
2318 gcc_assert (var->n_var_parts == 1);
2319 }
2320
2321 if (VAR_LOC_1PAUX (var))
2322 cur_loc = VAR_LOC_FROM (var);
2323 else
2324 cur_loc = var->var_part[0].cur_loc;
2325
2326 for (locp = &var->var_part[0].loc_chain, loc = *locp;
2327 loc; loc = *locp)
2328 {
2329 if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
2330 {
2331 locp = &loc->next;
2332 continue;
2333 }
2334
2335 *locp = loc->next;
2336 	  /* If we have deleted the location which was last emitted,
2337 	     we have to emit a new location, so add the variable to the
2338 	     set of changed variables. */
2339 if (cur_loc == loc->loc)
2340 {
2341 changed = true;
2342 var->var_part[0].cur_loc = NULL;
2343 if (VAR_LOC_1PAUX (var))
2344 VAR_LOC_FROM (var) = NULL;
2345 }
2346 delete loc;
2347 }
2348
2349 if (!var->var_part[0].loc_chain)
2350 {
2351 var->n_var_parts--;
2352 changed = true;
2353 }
2354 if (changed)
2355 variable_was_changed (var, set);
2356 }
2357
2358 return 1;
2359 }
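
/* An illustrative sketch, not part of the pass: the loc_chain walk
   above uses the pointer-to-pointer deletion idiom, unlinking nodes
   in a single pass without keeping a separate "previous" pointer.
   The toy_* names are hypothetical. */
#if 0
#include <stdlib.h>

struct toy_node { struct toy_node *next; int drop; };

static void
toy_filter (struct toy_node **listp)
{
  struct toy_node *node;
  while ((node = *listp))
    if (node->drop)
      {
	*listp = node->next;	/* Unlink; LISTP itself stays put. */
	free (node);
      }
    else
      listp = &node->next;	/* Advance to the next link. */
}
#endif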
2360
2361 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */
2362
2363 static void
2364 clobber_overlapping_mems (dataflow_set *set, rtx loc)
2365 {
2366 struct overlapping_mems coms;
2367
2368 gcc_checking_assert (GET_CODE (loc) == MEM);
2369
2370 coms.set = set;
2371 coms.loc = canon_rtx (loc);
2372 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0));
2373
2374 set->traversed_vars = set->vars;
2375 shared_hash_htab (set->vars)
2376 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms);
2377 set->traversed_vars = NULL;
2378 }
2379
2380 /* Set the location of DV, OFFSET as the MEM LOC. */
2381
2382 static void
2383 var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2384 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src,
2385 enum insert_option iopt)
2386 {
2387 if (dv_is_decl_p (dv))
2388 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv)));
2389
2390 set_variable_part (set, loc, dv, offset, initialized, set_src, iopt);
2391 }
2392
2393 /* Set the location part of variable MEM_EXPR (LOC) in dataflow set
2394 SET to LOC.
2395 Adjust the address first if it is stack pointer based. */
2396
2397 static void
2398 var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized,
2399 rtx set_src)
2400 {
2401 tree decl = MEM_EXPR (loc);
2402 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2403
2404 var_mem_decl_set (set, loc, initialized,
2405 dv_from_decl (decl), offset, set_src, INSERT);
2406 }
2407
2408 /* Delete and set the location part of variable MEM_EXPR (LOC) in
2409 dataflow set SET to LOC. If MODIFY is true, any other live copies
2410 of the same variable part are also deleted from the dataflow set,
2411 otherwise the variable part is assumed to be copied from another
2412 location holding the same part.
2413 Adjust the address first if it is stack pointer based. */
2414
2415 static void
2416 var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify,
2417 enum var_init_status initialized, rtx set_src)
2418 {
2419 tree decl = MEM_EXPR (loc);
2420 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2421
2422 clobber_overlapping_mems (set, loc);
2423 decl = var_debug_decl (decl);
2424
2425 if (initialized == VAR_INIT_STATUS_UNKNOWN)
2426 initialized = get_init_value (set, loc, dv_from_decl (decl));
2427
2428 if (modify)
2429 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src);
2430 var_mem_set (set, loc, initialized, set_src);
2431 }
2432
2433 /* Delete the location part LOC from dataflow set SET. If CLOBBER is
2434 true, also delete any other live copies of the same variable part.
2435 Adjust the address first if it is stack pointer based. */
2436
2437 static void
2438 var_mem_delete (dataflow_set *set, rtx loc, bool clobber)
2439 {
2440 tree decl = MEM_EXPR (loc);
2441 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc);
2442
2443 clobber_overlapping_mems (set, loc);
2444 decl = var_debug_decl (decl);
2445 if (clobber)
2446 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
2447 delete_variable_part (set, loc, dv_from_decl (decl), offset);
2448 }
2449
2450 /* Return true if LOC should not be expanded for location expressions,
2451 or used in them. */
2452
2453 static inline bool
2454 unsuitable_loc (rtx loc)
2455 {
2456 switch (GET_CODE (loc))
2457 {
2458 case PC:
2459 case SCRATCH:
2460 case CC0:
2461 case ASM_INPUT:
2462 case ASM_OPERANDS:
2463 return true;
2464
2465 default:
2466 return false;
2467 }
2468 }
2469
2470 /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values
2471 bound to it. */
2472
2473 static inline void
2474 val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
2475 {
2476 if (REG_P (loc))
2477 {
2478 if (modified)
2479 var_regno_delete (set, REGNO (loc));
2480 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2481 dv_from_value (val), 0, NULL_RTX, INSERT);
2482 }
2483 else if (MEM_P (loc))
2484 {
2485 struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;
2486
2487 if (modified)
2488 clobber_overlapping_mems (set, loc);
2489
2490 if (l && GET_CODE (l->loc) == VALUE)
2491 l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;
2492
2493 /* If this MEM is a global constant, we don't need it in the
2494 dynamic tables. ??? We should test this before emitting the
2495 micro-op in the first place. */
2496 while (l)
2497 if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
2498 break;
2499 else
2500 l = l->next;
2501
2502 if (!l)
2503 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
2504 dv_from_value (val), 0, NULL_RTX, INSERT);
2505 }
2506 else
2507 {
2508 /* Other kinds of equivalences are necessarily static, at least
2509 so long as we do not perform substitutions while merging
2510 expressions. */
2511 gcc_unreachable ();
2512 set_variable_part (set, loc, dv_from_value (val), 0,
2513 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2514 }
2515 }
2516
2517 /* Bind a value to a location it was just stored in. If MODIFIED
2518 holds, assume the location was modified, detaching it from any
2519 values bound to it. */
2520
2521 static void
2522 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
2523 bool modified)
2524 {
2525 cselib_val *v = CSELIB_VAL_PTR (val);
2526
2527 gcc_assert (cselib_preserved_value_p (v));
2528
2529 if (dump_file)
2530 {
2531 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0);
2532 print_inline_rtx (dump_file, loc, 0);
2533 fprintf (dump_file, " evaluates to ");
2534 print_inline_rtx (dump_file, val, 0);
2535 if (v->locs)
2536 {
2537 struct elt_loc_list *l;
2538 for (l = v->locs; l; l = l->next)
2539 {
2540 fprintf (dump_file, "\n%i: ", INSN_UID (l->setting_insn));
2541 print_inline_rtx (dump_file, l->loc, 0);
2542 }
2543 }
2544 fprintf (dump_file, "\n");
2545 }
2546
2547 gcc_checking_assert (!unsuitable_loc (loc));
2548
2549 val_bind (set, val, loc, modified);
2550 }
2551
2552 /* Clear (canonical address) slots that reference X. */
2553
2554 bool
2555 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x)
2556 {
2557 if (vt_get_canonicalize_base (*slot) == x)
2558 *slot = NULL;
2559 return true;
2560 }
2561
2562 /* Reset the variable for DV in SET, detaching all its equivalences
2563    and redirecting them to the remaining canonical value, if any. */
2564
2565 static void
2566 val_reset (dataflow_set *set, decl_or_value dv)
2567 {
2568 variable var = shared_hash_find (set->vars, dv) ;
2569 location_chain node;
2570 rtx cval;
2571
2572 if (!var || !var->n_var_parts)
2573 return;
2574
2575 gcc_assert (var->n_var_parts == 1);
2576
2577 if (var->onepart == ONEPART_VALUE)
2578 {
2579 rtx x = dv_as_value (dv);
2580
2581 /* Relationships in the global cache don't change, so reset the
2582 local cache entry only. */
2583 rtx *slot = local_get_addr_cache->get (x);
2584 if (slot)
2585 {
2586 /* If the value resolved back to itself, odds are that other
2587 values may have cached it too. These entries now refer
2588 to the old X, so detach them too. Entries that used the
2589 old X but resolved to something else remain ok as long as
2590 that something else isn't also reset. */
2591 if (*slot == x)
2592 local_get_addr_cache
2593 ->traverse<rtx, local_get_addr_clear_given_value> (x);
2594 *slot = NULL;
2595 }
2596 }
2597
2598 cval = NULL;
2599 for (node = var->var_part[0].loc_chain; node; node = node->next)
2600 if (GET_CODE (node->loc) == VALUE
2601 && canon_value_cmp (node->loc, cval))
2602 cval = node->loc;
2603
2604 for (node = var->var_part[0].loc_chain; node; node = node->next)
2605 if (GET_CODE (node->loc) == VALUE && cval != node->loc)
2606 {
2607 /* Redirect the equivalence link to the new canonical
2608 value, or simply remove it if it would point at
2609 itself. */
2610 if (cval)
2611 set_variable_part (set, cval, dv_from_value (node->loc),
2612 0, node->init, node->set_src, NO_INSERT);
2613 delete_variable_part (set, dv_as_value (dv),
2614 dv_from_value (node->loc), 0);
2615 }
2616
2617 if (cval)
2618 {
2619 decl_or_value cdv = dv_from_value (cval);
2620
2621       /* Keep the remaining values connected, accumulating links
2622 in the canonical value. */
2623 for (node = var->var_part[0].loc_chain; node; node = node->next)
2624 {
2625 if (node->loc == cval)
2626 continue;
2627 else if (GET_CODE (node->loc) == REG)
2628 var_reg_decl_set (set, node->loc, node->init, cdv, 0,
2629 node->set_src, NO_INSERT);
2630 else if (GET_CODE (node->loc) == MEM)
2631 var_mem_decl_set (set, node->loc, node->init, cdv, 0,
2632 node->set_src, NO_INSERT);
2633 else
2634 set_variable_part (set, node->loc, cdv, 0,
2635 node->init, node->set_src, NO_INSERT);
2636 }
2637 }
2638
2639 /* We remove this last, to make sure that the canonical value is not
2640 removed to the point of requiring reinsertion. */
2641 if (cval)
2642 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0);
2643
2644 clobber_variable_part (set, NULL, dv, 0, NULL);
2645 }
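
/* An illustrative sketch, not part of the pass, of the star repair
   performed above: when the node for DV is reset, the remaining
   VALUEs elect a new center (the most canonical member) and every
   other member is relinked to it.  The toy_* names and toy_link are
   hypothetical. */
#if 0
extern void toy_link (int from, int to);  /* Record an equivalence edge. */

static void
toy_reelect_center (const int *members, int n)
{
  int canon = members[0];
  int i;
  for (i = 1; i < n; i++)	/* First loop above: find the new center. */
    if (members[i] < canon)
      canon = members[i];
  for (i = 0; i < n; i++)	/* Second loop: redirect remaining links. */
    if (members[i] != canon)
      toy_link (members[i], canon);
}
#endif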
2646
2647 /* Find the values in a given location and map VAL to another
2648    value, if that mapping is unique, or else add the location as one
2649    holding the value. */
2650
2651 static void
2652 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
2653 {
2654 decl_or_value dv = dv_from_value (val);
2655
2656 if (dump_file && (dump_flags & TDF_DETAILS))
2657 {
2658 if (insn)
2659 fprintf (dump_file, "%i: ", INSN_UID (insn));
2660 else
2661 fprintf (dump_file, "head: ");
2662 print_inline_rtx (dump_file, val, 0);
2663 fputs (" is at ", dump_file);
2664 print_inline_rtx (dump_file, loc, 0);
2665 fputc ('\n', dump_file);
2666 }
2667
2668 val_reset (set, dv);
2669
2670 gcc_checking_assert (!unsuitable_loc (loc));
2671
2672 if (REG_P (loc))
2673 {
2674 attrs node, found = NULL;
2675
2676 for (node = set->regs[REGNO (loc)]; node; node = node->next)
2677 if (dv_is_value_p (node->dv)
2678 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
2679 {
2680 found = node;
2681
2682 /* Map incoming equivalences. ??? Wouldn't it be nice if
2683 we just started sharing the location lists? Maybe a
2684 circular list ending at the value itself or some
2685 such. */
2686 set_variable_part (set, dv_as_value (node->dv),
2687 dv_from_value (val), node->offset,
2688 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2689 set_variable_part (set, val, node->dv, node->offset,
2690 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
2691 }
2692
2693 /* If we didn't find any equivalence, we need to remember that
2694 this value is held in the named register. */
2695 if (found)
2696 return;
2697 }
2698 /* ??? Attempt to find and merge equivalent MEMs or other
2699 expressions too. */
2700
2701 val_bind (set, val, loc, false);
2702 }
2703
2704 /* Initialize dataflow set SET to be empty: clear its register lists
2705    and share the empty hash table for its variables. */
2706
2707 static void
2708 dataflow_set_init (dataflow_set *set)
2709 {
2710 init_attrs_list_set (set->regs);
2711 set->vars = shared_hash_copy (empty_shared_hash);
2712 set->stack_adjust = 0;
2713 set->traversed_vars = NULL;
2714 }
2715
2716 /* Delete the contents of dataflow set SET. */
2717
2718 static void
2719 dataflow_set_clear (dataflow_set *set)
2720 {
2721 int i;
2722
2723 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2724 attrs_list_clear (&set->regs[i]);
2725
2726 shared_hash_destroy (set->vars);
2727 set->vars = shared_hash_copy (empty_shared_hash);
2728 }
2729
2730 /* Copy the contents of dataflow set SRC to DST. */
2731
2732 static void
2733 dataflow_set_copy (dataflow_set *dst, dataflow_set *src)
2734 {
2735 int i;
2736
2737 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2738 attrs_list_copy (&dst->regs[i], src->regs[i]);
2739
2740 shared_hash_destroy (dst->vars);
2741 dst->vars = shared_hash_copy (src->vars);
2742 dst->stack_adjust = src->stack_adjust;
2743 }
2744
2745 /* Information for merging lists of locations for a given offset
2746    of a variable. */
2747 struct variable_union_info
2748 {
2749 /* Node of the location chain. */
2750 location_chain lc;
2751
2752 /* The sum of positions in the input chains. */
2753 int pos;
2754
2755 /* The position in the chain of DST dataflow set. */
2756 int pos_dst;
2757 };
2758
2759 /* Buffer for location list sorting and its allocated size. */
2760 static struct variable_union_info *vui_vec;
2761 static int vui_allocated;
2762
2763 /* Compare function for qsort, ordering the structures by the POS element. */
2764
2765 static int
2766 variable_union_info_cmp_pos (const void *n1, const void *n2)
2767 {
2768 const struct variable_union_info *const i1 =
2769 (const struct variable_union_info *) n1;
2770 const struct variable_union_info *const i2 =
2771 ( const struct variable_union_info *) n2;
2772
2773 if (i1->pos != i2->pos)
2774 return i1->pos - i2->pos;
2775
2776 return (i1->pos_dst - i2->pos_dst);
2777 }
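
/* A worked example, not from the sources, of the priority scheme the
   comparator above sorts by: a location present in both chains is
   keyed by the sum of its two positions, while anything present in
   only one chain is biased past src_l + dst_l, so common (and newer)
   locations sort to the front of the merged chain.  The toy_* names
   are hypothetical. */
#if 0
#include <stdlib.h>

static int
toy_cmp_int (const void *p1, const void *p2)
{
  return *(const int *) p1 - *(const int *) p2;
}

static void
toy_union_order (void)
{
  /* SRC = (A B), DST = (B C), so src_l = dst_l = 2.  B is common and
     keyed 1 + 0 = 1; A and C are keyed 0 + 4 and 1 + 4. */
  int keys[3] = { 1 /* B */, 4 /* A */, 5 /* C */ };
  qsort (keys, 3, sizeof keys[0], toy_cmp_int);
  /* Sorted order: B, A, C -- the common location first. */
}
#endif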
2778
2779 /* Compute union of location parts of variable SRC and the same variable
2780    found in dataflow set SET. Compute "sorted" union of the location chains
2781 for common offsets, i.e. the locations of a variable part are sorted by
2782 a priority where the priority is the sum of the positions in the 2 chains
2783 (if a location is only in one list the position in the second list is
2784 defined to be larger than the length of the chains).
2785 When we are updating the location parts the newest location is in the
2786 beginning of the chain, so when we do the described "sorted" union
2787 we keep the newest locations in the beginning. */
2788
2789 static int
2790 variable_union (variable src, dataflow_set *set)
2791 {
2792 variable dst;
2793 variable_def **dstp;
2794 int i, j, k;
2795
2796 dstp = shared_hash_find_slot (set->vars, src->dv);
2797 if (!dstp || !*dstp)
2798 {
2799 src->refcount++;
2800
2801 dst_can_be_shared = false;
2802 if (!dstp)
2803 dstp = shared_hash_find_slot_unshare (&set->vars, src->dv, INSERT);
2804
2805 *dstp = src;
2806
2807 /* Continue traversing the hash table. */
2808 return 1;
2809 }
2810 else
2811 dst = *dstp;
2812
2813 gcc_assert (src->n_var_parts);
2814 gcc_checking_assert (src->onepart == dst->onepart);
2815
2816 /* We can combine one-part variables very efficiently, because their
2817 entries are in canonical order. */
2818 if (src->onepart)
2819 {
2820 location_chain *nodep, dnode, snode;
2821
2822 gcc_assert (src->n_var_parts == 1
2823 && dst->n_var_parts == 1);
2824
2825 snode = src->var_part[0].loc_chain;
2826 gcc_assert (snode);
2827
2828 restart_onepart_unshared:
2829 nodep = &dst->var_part[0].loc_chain;
2830 dnode = *nodep;
2831 gcc_assert (dnode);
2832
2833 while (snode)
2834 {
2835 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;
2836
2837 if (r > 0)
2838 {
2839 location_chain nnode;
2840
2841 if (shared_var_p (dst, set->vars))
2842 {
2843 dstp = unshare_variable (set, dstp, dst,
2844 VAR_INIT_STATUS_INITIALIZED);
2845 dst = *dstp;
2846 goto restart_onepart_unshared;
2847 }
2848
2849 *nodep = nnode = new location_chain_def;
2850 nnode->loc = snode->loc;
2851 nnode->init = snode->init;
2852 if (!snode->set_src || MEM_P (snode->set_src))
2853 nnode->set_src = NULL;
2854 else
2855 nnode->set_src = snode->set_src;
2856 nnode->next = dnode;
2857 dnode = nnode;
2858 }
2859 else if (r == 0)
2860 gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));
2861
2862 if (r >= 0)
2863 snode = snode->next;
2864
2865 nodep = &dnode->next;
2866 dnode = *nodep;
2867 }
2868
2869 return 1;
2870 }
2871
2872 gcc_checking_assert (!src->onepart);
2873
2874 /* Count the number of location parts, result is K. */
2875 for (i = 0, j = 0, k = 0;
2876 i < src->n_var_parts && j < dst->n_var_parts; k++)
2877 {
2878 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2879 {
2880 i++;
2881 j++;
2882 }
2883 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
2884 i++;
2885 else
2886 j++;
2887 }
2888 k += src->n_var_parts - i;
2889 k += dst->n_var_parts - j;
2890
2891   /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
2892      thus there are at most MAX_VAR_PARTS different offsets. */
2893 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);
2894
2895 if (dst->n_var_parts != k && shared_var_p (dst, set->vars))
2896 {
2897 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN);
2898 dst = *dstp;
2899 }
2900
2901 i = src->n_var_parts - 1;
2902 j = dst->n_var_parts - 1;
2903 dst->n_var_parts = k;
2904
2905 for (k--; k >= 0; k--)
2906 {
2907 location_chain node, node2;
2908
2909 if (i >= 0 && j >= 0
2910 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
2911 {
2912 	  /* Compute the "sorted" union of the chains, i.e. the locations
2913 	     present in both chains go first, sorted by the sum of their
2914 	     positions in the two chains. */
2915 int dst_l, src_l;
2916 int ii, jj, n;
2917 struct variable_union_info *vui;
2918
2919 	  /* If DST is shared, compare the location chains.
2920 	     If they are different, we will most likely modify the chain
2921 	     in DST, so make a copy of DST first. */
2922 if (shared_var_p (dst, set->vars))
2923 {
2924 for (node = src->var_part[i].loc_chain,
2925 node2 = dst->var_part[j].loc_chain; node && node2;
2926 node = node->next, node2 = node2->next)
2927 {
2928 if (!((REG_P (node2->loc)
2929 && REG_P (node->loc)
2930 && REGNO (node2->loc) == REGNO (node->loc))
2931 || rtx_equal_p (node2->loc, node->loc)))
2932 {
2933 if (node2->init < node->init)
2934 node2->init = node->init;
2935 break;
2936 }
2937 }
2938 if (node || node2)
2939 {
2940 dstp = unshare_variable (set, dstp, dst,
2941 VAR_INIT_STATUS_UNKNOWN);
2942 dst = (variable)*dstp;
2943 }
2944 }
2945
2946 src_l = 0;
2947 for (node = src->var_part[i].loc_chain; node; node = node->next)
2948 src_l++;
2949 dst_l = 0;
2950 for (node = dst->var_part[j].loc_chain; node; node = node->next)
2951 dst_l++;
2952
2953 if (dst_l == 1)
2954 {
2955 	      /* The most common case is much simpler: no qsort is needed. */
2956 location_chain dstnode = dst->var_part[j].loc_chain;
2957 dst->var_part[k].loc_chain = dstnode;
2958 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
2959 node2 = dstnode;
2960 for (node = src->var_part[i].loc_chain; node; node = node->next)
2961 if (!((REG_P (dstnode->loc)
2962 && REG_P (node->loc)
2963 && REGNO (dstnode->loc) == REGNO (node->loc))
2964 || rtx_equal_p (dstnode->loc, node->loc)))
2965 {
2966 location_chain new_node;
2967
2968 /* Copy the location from SRC. */
2969 new_node = new location_chain_def;
2970 new_node->loc = node->loc;
2971 new_node->init = node->init;
2972 if (!node->set_src || MEM_P (node->set_src))
2973 new_node->set_src = NULL;
2974 else
2975 new_node->set_src = node->set_src;
2976 node2->next = new_node;
2977 node2 = new_node;
2978 }
2979 node2->next = NULL;
2980 }
2981 else
2982 {
2983 if (src_l + dst_l > vui_allocated)
2984 {
2985 vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
2986 vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
2987 vui_allocated);
2988 }
2989 vui = vui_vec;
2990
2991 /* Fill in the locations from DST. */
2992 for (node = dst->var_part[j].loc_chain, jj = 0; node;
2993 node = node->next, jj++)
2994 {
2995 vui[jj].lc = node;
2996 vui[jj].pos_dst = jj;
2997
2998 	      /* Bias POS by a value larger than any sum of two valid positions. */
2999 vui[jj].pos = jj + src_l + dst_l;
3000 }
3001
3002 /* Fill in the locations from SRC. */
3003 n = dst_l;
3004 for (node = src->var_part[i].loc_chain, ii = 0; node;
3005 node = node->next, ii++)
3006 {
3007 /* Find location from NODE. */
3008 for (jj = 0; jj < dst_l; jj++)
3009 {
3010 if ((REG_P (vui[jj].lc->loc)
3011 && REG_P (node->loc)
3012 && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
3013 || rtx_equal_p (vui[jj].lc->loc, node->loc))
3014 {
3015 vui[jj].pos = jj + ii;
3016 break;
3017 }
3018 }
3019 if (jj >= dst_l) /* The location has not been found. */
3020 {
3021 location_chain new_node;
3022
3023 /* Copy the location from SRC. */
3024 new_node = new location_chain_def;
3025 new_node->loc = node->loc;
3026 new_node->init = node->init;
3027 if (!node->set_src || MEM_P (node->set_src))
3028 new_node->set_src = NULL;
3029 else
3030 new_node->set_src = node->set_src;
3031 vui[n].lc = new_node;
3032 vui[n].pos_dst = src_l + dst_l;
3033 vui[n].pos = ii + src_l + dst_l;
3034 n++;
3035 }
3036 }
3037
3038 if (dst_l == 2)
3039 {
3040 	      /* Special case of a still very common case. For dst_l == 2
3041 		 all entries dst_l ... n-1 are already sorted, since for
3042 		 i >= dst_l we have vui[i].pos == i + src_l + dst_l. */
3043 if (vui[0].pos > vui[1].pos)
3044 {
3045 /* Order should be 1, 0, 2... */
3046 dst->var_part[k].loc_chain = vui[1].lc;
3047 vui[1].lc->next = vui[0].lc;
3048 if (n >= 3)
3049 {
3050 vui[0].lc->next = vui[2].lc;
3051 vui[n - 1].lc->next = NULL;
3052 }
3053 else
3054 vui[0].lc->next = NULL;
3055 ii = 3;
3056 }
3057 else
3058 {
3059 dst->var_part[k].loc_chain = vui[0].lc;
3060 if (n >= 3 && vui[2].pos < vui[1].pos)
3061 {
3062 /* Order should be 0, 2, 1, 3... */
3063 vui[0].lc->next = vui[2].lc;
3064 vui[2].lc->next = vui[1].lc;
3065 if (n >= 4)
3066 {
3067 vui[1].lc->next = vui[3].lc;
3068 vui[n - 1].lc->next = NULL;
3069 }
3070 else
3071 vui[1].lc->next = NULL;
3072 ii = 4;
3073 }
3074 else
3075 {
3076 /* Order should be 0, 1, 2... */
3077 ii = 1;
3078 vui[n - 1].lc->next = NULL;
3079 }
3080 }
3081 for (; ii < n; ii++)
3082 vui[ii - 1].lc->next = vui[ii].lc;
3083 }
3084 else
3085 {
3086 qsort (vui, n, sizeof (struct variable_union_info),
3087 variable_union_info_cmp_pos);
3088
3089 /* Reconnect the nodes in sorted order. */
3090 for (ii = 1; ii < n; ii++)
3091 vui[ii - 1].lc->next = vui[ii].lc;
3092 vui[n - 1].lc->next = NULL;
3093 dst->var_part[k].loc_chain = vui[0].lc;
3094 }
3095
3096 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
3097 }
3098 i--;
3099 j--;
3100 }
3101 else if ((i >= 0 && j >= 0
3102 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
3103 || i < 0)
3104 {
3105 dst->var_part[k] = dst->var_part[j];
3106 j--;
3107 }
3108 else if ((i >= 0 && j >= 0
3109 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
3110 || j < 0)
3111 {
3112 location_chain *nextp;
3113
3114 /* Copy the chain from SRC. */
3115 nextp = &dst->var_part[k].loc_chain;
3116 for (node = src->var_part[i].loc_chain; node; node = node->next)
3117 {
3118 location_chain new_lc;
3119
3120 new_lc = new location_chain_def;
3121 new_lc->next = NULL;
3122 new_lc->init = node->init;
3123 if (!node->set_src || MEM_P (node->set_src))
3124 new_lc->set_src = NULL;
3125 else
3126 new_lc->set_src = node->set_src;
3127 new_lc->loc = node->loc;
3128
3129 *nextp = new_lc;
3130 nextp = &new_lc->next;
3131 }
3132
3133 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
3134 i--;
3135 }
3136 dst->var_part[k].cur_loc = NULL;
3137 }
3138
3139 if (flag_var_tracking_uninit)
3140 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
3141 {
3142 location_chain node, node2;
3143 for (node = src->var_part[i].loc_chain; node; node = node->next)
3144 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
3145 if (rtx_equal_p (node->loc, node2->loc))
3146 {
3147 if (node->init > node2->init)
3148 node2->init = node->init;
3149 }
3150 }
3151
3152 /* Continue traversing the hash table. */
3153 return 1;
3154 }
3155
3156 /* Compute union of dataflow sets SRC and DST and store it to DST. */
3157
3158 static void
3159 dataflow_set_union (dataflow_set *dst, dataflow_set *src)
3160 {
3161 int i;
3162
3163 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3164 attrs_list_union (&dst->regs[i], src->regs[i]);
3165
3166 if (dst->vars == empty_shared_hash)
3167 {
3168 shared_hash_destroy (dst->vars);
3169 dst->vars = shared_hash_copy (src->vars);
3170 }
3171 else
3172 {
3173 variable_iterator_type hi;
3174 variable var;
3175
3176 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars),
3177 var, variable, hi)
3178 variable_union (var, dst);
3179 }
3180 }
3181
3182 /* Whether the value is currently being expanded. */
3183 #define VALUE_RECURSED_INTO(x) \
3184 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)
3185
3186 /* Whether no expansion was found, saving useless lookups.
3187 It must only be set when VALUE_CHANGED is clear. */
3188 #define NO_LOC_P(x) \
3189 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)
3190
3191 /* Whether cur_loc in the value needs to be (re)computed. */
3192 #define VALUE_CHANGED(x) \
3193 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
3194 /* Whether cur_loc in the decl needs to be (re)computed. */
3195 #define DECL_CHANGED(x) TREE_VISITED (x)
3196
3197 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For
3198 user DECLs, this means they're in changed_variables. Values and
3199 debug exprs may be left with this flag set if no user variable
3200 requires them to be evaluated. */
3201
3202 static inline void
3203 set_dv_changed (decl_or_value dv, bool newv)
3204 {
3205 switch (dv_onepart_p (dv))
3206 {
3207 case ONEPART_VALUE:
3208 if (newv)
3209 NO_LOC_P (dv_as_value (dv)) = false;
3210 VALUE_CHANGED (dv_as_value (dv)) = newv;
3211 break;
3212
3213 case ONEPART_DEXPR:
3214 if (newv)
3215 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false;
3216 /* Fall through... */
3217
3218 default:
3219 DECL_CHANGED (dv_as_decl (dv)) = newv;
3220 break;
3221 }
3222 }
3223
3224 /* Return true if DV needs to have its cur_loc recomputed. */
3225
3226 static inline bool
3227 dv_changed_p (decl_or_value dv)
3228 {
3229 return (dv_is_value_p (dv)
3230 ? VALUE_CHANGED (dv_as_value (dv))
3231 : DECL_CHANGED (dv_as_decl (dv)));
3232 }
3233
3234 /* Return a location list node whose loc is rtx_equal to LOC, in the
3235 location list of a one-part variable or value VAR, or in that of
3236 any values recursively mentioned in the location lists. VARS must
3237 be in star-canonical form. */
3238
3239 static location_chain
3240 find_loc_in_1pdv (rtx loc, variable var, variable_table_type *vars)
3241 {
3242 location_chain node;
3243 enum rtx_code loc_code;
3244
3245 if (!var)
3246 return NULL;
3247
3248 gcc_checking_assert (var->onepart);
3249
3250 if (!var->n_var_parts)
3251 return NULL;
3252
3253 gcc_checking_assert (loc != dv_as_opaque (var->dv));
3254
3255 loc_code = GET_CODE (loc);
3256 for (node = var->var_part[0].loc_chain; node; node = node->next)
3257 {
3258 decl_or_value dv;
3259 variable rvar;
3260
3261 if (GET_CODE (node->loc) != loc_code)
3262 {
3263 if (GET_CODE (node->loc) != VALUE)
3264 continue;
3265 }
3266 else if (loc == node->loc)
3267 return node;
3268 else if (loc_code != VALUE)
3269 {
3270 if (rtx_equal_p (loc, node->loc))
3271 return node;
3272 continue;
3273 }
3274
3275 /* Since we're in star-canonical form, we don't need to visit
3276 non-canonical nodes: one-part variables and non-canonical
3277 values would only point back to the canonical node. */
3278 if (dv_is_value_p (var->dv)
3279 && !canon_value_cmp (node->loc, dv_as_value (var->dv)))
3280 {
3281 /* Skip all subsequent VALUEs. */
3282 while (node->next && GET_CODE (node->next->loc) == VALUE)
3283 {
3284 node = node->next;
3285 gcc_checking_assert (!canon_value_cmp (node->loc,
3286 dv_as_value (var->dv)));
3287 if (loc == node->loc)
3288 return node;
3289 }
3290 continue;
3291 }
3292
3293 gcc_checking_assert (node == var->var_part[0].loc_chain);
3294 gcc_checking_assert (!node->next);
3295
3296 dv = dv_from_value (node->loc);
3297 rvar = vars->find_with_hash (dv, dv_htab_hash (dv));
3298 return find_loc_in_1pdv (loc, rvar, vars);
3299 }
3300
3301 /* ??? Gotta look in cselib_val locations too. */
3302
3303 return NULL;
3304 }
3305
3306 /* Hash table iteration argument passed to variable_merge. */
3307 struct dfset_merge
3308 {
3309 /* The set in which the merge is to be inserted. */
3310 dataflow_set *dst;
3311 /* The set that we're iterating in. */
3312 dataflow_set *cur;
3313 /* The set that may contain the other dv we are to merge with. */
3314 dataflow_set *src;
3315 /* Number of onepart dvs in src. */
3316 int src_onepart_cnt;
3317 };
3318
3319 /* Insert LOC in *NODEP, if it's not there yet. The list must be in
3320 loc_cmp order, and it is maintained as such. */
3321
3322 static void
3323 insert_into_intersection (location_chain *nodep, rtx loc,
3324 enum var_init_status status)
3325 {
3326 location_chain node;
3327 int r;
3328
3329 for (node = *nodep; node; nodep = &node->next, node = *nodep)
3330 if ((r = loc_cmp (node->loc, loc)) == 0)
3331 {
3332 node->init = MIN (node->init, status);
3333 return;
3334 }
3335 else if (r > 0)
3336 break;
3337
3338 node = new location_chain_def;
3339
3340 node->loc = loc;
3341 node->set_src = NULL;
3342 node->init = status;
3343 node->next = *nodep;
3344 *nodep = node;
3345 }
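
/* An illustrative sketch, not part of the pass, of the insertion
   above: the list stays in comparator order, and a duplicate merges
   by keeping the weaker initialization status, mirroring the MIN.
   The toy_* names are hypothetical. */
#if 0
#include <stdlib.h>

struct toy_loc { struct toy_loc *next; int key; int init; };

static void
toy_sorted_insert (struct toy_loc **nodep, int key, int init)
{
  struct toy_loc *node;

  for (node = *nodep; node; nodep = &node->next, node = *nodep)
    if (node->key == key)
      {
	if (init < node->init)
	  node->init = init;	/* Duplicate: keep the weaker status. */
	return;
      }
    else if (node->key > key)
      break;			/* Found the sorted position. */

  node = (struct toy_loc *) malloc (sizeof *node);
  node->key = key;
  node->init = init;
  node->next = *nodep;
  *nodep = node;
}
#endif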
3346
3347 /* Insert in DEST the intersection of the locations present in both
3348 S1NODE and S2VAR, directly or indirectly. S1NODE is from a
3349 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in
3350 DSM->dst. */
3351
3352 static void
3353 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm,
3354 location_chain s1node, variable s2var)
3355 {
3356 dataflow_set *s1set = dsm->cur;
3357 dataflow_set *s2set = dsm->src;
3358 location_chain found;
3359
3360 if (s2var)
3361 {
3362 location_chain s2node;
3363
3364 gcc_checking_assert (s2var->onepart);
3365
3366 if (s2var->n_var_parts)
3367 {
3368 s2node = s2var->var_part[0].loc_chain;
3369
3370 for (; s1node && s2node;
3371 s1node = s1node->next, s2node = s2node->next)
3372 if (s1node->loc != s2node->loc)
3373 break;
3374 else if (s1node->loc == val)
3375 continue;
3376 else
3377 insert_into_intersection (dest, s1node->loc,
3378 MIN (s1node->init, s2node->init));
3379 }
3380 }
3381
3382 for (; s1node; s1node = s1node->next)
3383 {
3384 if (s1node->loc == val)
3385 continue;
3386
3387 if ((found = find_loc_in_1pdv (s1node->loc, s2var,
3388 shared_hash_htab (s2set->vars))))
3389 {
3390 insert_into_intersection (dest, s1node->loc,
3391 MIN (s1node->init, found->init));
3392 continue;
3393 }
3394
3395 if (GET_CODE (s1node->loc) == VALUE
3396 && !VALUE_RECURSED_INTO (s1node->loc))
3397 {
3398 decl_or_value dv = dv_from_value (s1node->loc);
3399 variable svar = shared_hash_find (s1set->vars, dv);
3400 if (svar)
3401 {
3402 if (svar->n_var_parts == 1)
3403 {
3404 VALUE_RECURSED_INTO (s1node->loc) = true;
3405 intersect_loc_chains (val, dest, dsm,
3406 svar->var_part[0].loc_chain,
3407 s2var);
3408 VALUE_RECURSED_INTO (s1node->loc) = false;
3409 }
3410 }
3411 }
3412
3413 /* ??? gotta look in cselib_val locations too. */
3414
3415 /* ??? if the location is equivalent to any location in src,
3416 searched recursively
3417
3418 add to dst the values needed to represent the equivalence
3419
3420 	 telling whether location S is equivalent to another dv's
3421 location list:
3422
3423 for each location D in the list
3424
3425 if S and D satisfy rtx_equal_p, then it is present
3426
3427 else if D is a value, recurse without cycles
3428
3429 else if S and D have the same CODE and MODE
3430
3431 for each operand oS and the corresponding oD
3432
3433 	 if oS and oD are not equivalent, then S and D are not equivalent
3434
3435 else if they are RTX vectors
3436
3437 if any vector oS element is not equivalent to its respective oD,
3438 then S and D are not equivalent
3439
3440 */
3441
3442
3443 }
3444 }
3445
3446 /* Return -1 if X should be before Y in a location list for a 1-part
3447 variable, 1 if Y should be before X, and 0 if they're equivalent
3448 and should not appear in the list. */
3449
3450 static int
3451 loc_cmp (rtx x, rtx y)
3452 {
3453 int i, j, r;
3454 RTX_CODE code = GET_CODE (x);
3455 const char *fmt;
3456
3457 if (x == y)
3458 return 0;
3459
3460 if (REG_P (x))
3461 {
3462 if (!REG_P (y))
3463 return -1;
3464 gcc_assert (GET_MODE (x) == GET_MODE (y));
3465 if (REGNO (x) == REGNO (y))
3466 return 0;
3467 else if (REGNO (x) < REGNO (y))
3468 return -1;
3469 else
3470 return 1;
3471 }
3472
3473 if (REG_P (y))
3474 return 1;
3475
3476 if (MEM_P (x))
3477 {
3478 if (!MEM_P (y))
3479 return -1;
3480 gcc_assert (GET_MODE (x) == GET_MODE (y));
3481 return loc_cmp (XEXP (x, 0), XEXP (y, 0));
3482 }
3483
3484 if (MEM_P (y))
3485 return 1;
3486
3487 if (GET_CODE (x) == VALUE)
3488 {
3489 if (GET_CODE (y) != VALUE)
3490 return -1;
3491       /* Don't assert the modes are the same; that is true only
3492 when not recursing. (subreg:QI (value:SI 1:1) 0)
3493 and (subreg:QI (value:DI 2:2) 0) can be compared,
3494 even when the modes are different. */
3495 if (canon_value_cmp (x, y))
3496 return -1;
3497 else
3498 return 1;
3499 }
3500
3501 if (GET_CODE (y) == VALUE)
3502 return 1;
3503
3504 /* Entry value is the least preferable kind of expression. */
3505 if (GET_CODE (x) == ENTRY_VALUE)
3506 {
3507 if (GET_CODE (y) != ENTRY_VALUE)
3508 return 1;
3509 gcc_assert (GET_MODE (x) == GET_MODE (y));
3510 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
3511 }
3512
3513 if (GET_CODE (y) == ENTRY_VALUE)
3514 return -1;
3515
3516 if (GET_CODE (x) == GET_CODE (y))
3517 /* Compare operands below. */;
3518 else if (GET_CODE (x) < GET_CODE (y))
3519 return -1;
3520 else
3521 return 1;
3522
3523 gcc_assert (GET_MODE (x) == GET_MODE (y));
3524
3525 if (GET_CODE (x) == DEBUG_EXPR)
3526 {
3527 if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3528 < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)))
3529 return -1;
3530 gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x))
3531 > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y)));
3532 return 1;
3533 }
3534
3535 fmt = GET_RTX_FORMAT (code);
3536 for (i = 0; i < GET_RTX_LENGTH (code); i++)
3537 switch (fmt[i])
3538 {
3539 case 'w':
3540 if (XWINT (x, i) == XWINT (y, i))
3541 break;
3542 else if (XWINT (x, i) < XWINT (y, i))
3543 return -1;
3544 else
3545 return 1;
3546
3547 case 'n':
3548 case 'i':
3549 if (XINT (x, i) == XINT (y, i))
3550 break;
3551 else if (XINT (x, i) < XINT (y, i))
3552 return -1;
3553 else
3554 return 1;
3555
3556 case 'V':
3557 case 'E':
3558 /* Compare the vector length first. */
3559 if (XVECLEN (x, i) == XVECLEN (y, i))
3560 	/* Compare the vector elements. */;
3561 else if (XVECLEN (x, i) < XVECLEN (y, i))
3562 return -1;
3563 else
3564 return 1;
3565
3566 for (j = 0; j < XVECLEN (x, i); j++)
3567 if ((r = loc_cmp (XVECEXP (x, i, j),
3568 XVECEXP (y, i, j))))
3569 return r;
3570 break;
3571
3572 case 'e':
3573 if ((r = loc_cmp (XEXP (x, i), XEXP (y, i))))
3574 return r;
3575 break;
3576
3577 case 'S':
3578 case 's':
3579 if (XSTR (x, i) == XSTR (y, i))
3580 break;
3581 if (!XSTR (x, i))
3582 return -1;
3583 if (!XSTR (y, i))
3584 return 1;
3585 if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0)
3586 break;
3587 else if (r < 0)
3588 return -1;
3589 else
3590 return 1;
3591
3592 case 'u':
3593 /* These are just backpointers, so they don't matter. */
3594 break;
3595
3596 case '0':
3597 case 't':
3598 break;
3599
3600 /* It is believed that rtx's at this level will never
3601 contain anything but integers and other rtx's,
3602 except for within LABEL_REFs and SYMBOL_REFs. */
3603 default:
3604 gcc_unreachable ();
3605 }
3606 if (CONST_WIDE_INT_P (x))
3607 {
3608 /* Compare the vector length first. */
3609       if (CONST_WIDE_INT_NUNITS (x) > CONST_WIDE_INT_NUNITS (y))
3610 return 1;
3611 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y))
3612 return -1;
3613
3614       /* Compare the vector elements. */
3615 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--)
3616 {
3617 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j))
3618 return -1;
3619 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j))
3620 return 1;
3621 }
3622 }
3623
3624 return 0;
3625 }
3626
3627 #if ENABLE_CHECKING
3628 /* Check the order of entries in one-part variables. */
3629
3630 int
3631 canonicalize_loc_order_check (variable_def **slot,
3632 dataflow_set *data ATTRIBUTE_UNUSED)
3633 {
3634 variable var = *slot;
3635 location_chain node, next;
3636
3637 #ifdef ENABLE_RTL_CHECKING
3638 int i;
3639 for (i = 0; i < var->n_var_parts; i++)
3640     gcc_assert (var->var_part[i].cur_loc == NULL);
3641 gcc_assert (!var->in_changed_variables);
3642 #endif
3643
3644 if (!var->onepart)
3645 return 1;
3646
3647 gcc_assert (var->n_var_parts == 1);
3648 node = var->var_part[0].loc_chain;
3649 gcc_assert (node);
3650
3651 while ((next = node->next))
3652 {
3653 gcc_assert (loc_cmp (node->loc, next->loc) < 0);
3654 node = next;
3655 }
3656
3657 return 1;
3658 }
3659 #endif
3660
3661 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are
3662 more likely to be chosen as canonical for an equivalence set.
3663 Ensure less likely values can reach more likely neighbors, making
3664 the connections bidirectional. */
3665
3666 int
3667 canonicalize_values_mark (variable_def **slot, dataflow_set *set)
3668 {
3669 variable var = *slot;
3670 decl_or_value dv = var->dv;
3671 rtx val;
3672 location_chain node;
3673
3674 if (!dv_is_value_p (dv))
3675 return 1;
3676
3677 gcc_checking_assert (var->n_var_parts == 1);
3678
3679 val = dv_as_value (dv);
3680
3681 for (node = var->var_part[0].loc_chain; node; node = node->next)
3682 if (GET_CODE (node->loc) == VALUE)
3683 {
3684 if (canon_value_cmp (node->loc, val))
3685 VALUE_RECURSED_INTO (val) = true;
3686 else
3687 {
3688 decl_or_value odv = dv_from_value (node->loc);
3689 variable_def **oslot;
3690 oslot = shared_hash_find_slot_noinsert (set->vars, odv);
3691
3692 set_slot_part (set, val, oslot, odv, 0,
3693 node->init, NULL_RTX);
3694
3695 VALUE_RECURSED_INTO (node->loc) = true;
3696 }
3697 }
3698
3699 return 1;
3700 }
3701
3702 /* Remove redundant entries from equivalence lists in onepart
3703 variables, canonicalizing equivalence sets into star shapes. */
3704
3705 int
3706 canonicalize_values_star (variable_def **slot, dataflow_set *set)
3707 {
3708 variable var = *slot;
3709 decl_or_value dv = var->dv;
3710 location_chain node;
3711 decl_or_value cdv;
3712 rtx val, cval;
3713 variable_def **cslot;
3714 bool has_value;
3715 bool has_marks;
3716
3717 if (!var->onepart)
3718 return 1;
3719
3720 gcc_checking_assert (var->n_var_parts == 1);
3721
3722 if (dv_is_value_p (dv))
3723 {
3724 cval = dv_as_value (dv);
3725 if (!VALUE_RECURSED_INTO (cval))
3726 return 1;
3727 VALUE_RECURSED_INTO (cval) = false;
3728 }
3729 else
3730 cval = NULL_RTX;
3731
3732 restart:
3733 val = cval;
3734 has_value = false;
3735 has_marks = false;
3736
3737 gcc_assert (var->n_var_parts == 1);
3738
3739 for (node = var->var_part[0].loc_chain; node; node = node->next)
3740 if (GET_CODE (node->loc) == VALUE)
3741 {
3742 has_value = true;
3743 if (VALUE_RECURSED_INTO (node->loc))
3744 has_marks = true;
3745 if (canon_value_cmp (node->loc, cval))
3746 cval = node->loc;
3747 }
3748
3749 if (!has_value)
3750 return 1;
3751
3752 if (cval == val)
3753 {
3754 if (!has_marks || dv_is_decl_p (dv))
3755 return 1;
3756
3757 /* Keep it marked so that we revisit it, either after visiting a
3758 child node, or after visiting a new parent that might be
3759 found out. */
3760 VALUE_RECURSED_INTO (val) = true;
3761
3762 for (node = var->var_part[0].loc_chain; node; node = node->next)
3763 if (GET_CODE (node->loc) == VALUE
3764 && VALUE_RECURSED_INTO (node->loc))
3765 {
3766 cval = node->loc;
3767 restart_with_cval:
3768 VALUE_RECURSED_INTO (cval) = false;
3769 dv = dv_from_value (cval);
3770 slot = shared_hash_find_slot_noinsert (set->vars, dv);
3771 if (!slot)
3772 {
3773 gcc_assert (dv_is_decl_p (var->dv));
3774 /* The canonical value was reset and dropped.
3775 Remove it. */
3776 clobber_variable_part (set, NULL, var->dv, 0, NULL);
3777 return 1;
3778 }
3779 var = *slot;
3780 gcc_assert (dv_is_value_p (var->dv));
3781 if (var->n_var_parts == 0)
3782 return 1;
3783 gcc_assert (var->n_var_parts == 1);
3784 goto restart;
3785 }
3786
3787 VALUE_RECURSED_INTO (val) = false;
3788
3789 return 1;
3790 }
3791
3792 /* Push values to the canonical one. */
3793 cdv = dv_from_value (cval);
3794 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3795
3796 for (node = var->var_part[0].loc_chain; node; node = node->next)
3797 if (node->loc != cval)
3798 {
3799 cslot = set_slot_part (set, node->loc, cslot, cdv, 0,
3800 node->init, NULL_RTX);
3801 if (GET_CODE (node->loc) == VALUE)
3802 {
3803 decl_or_value ndv = dv_from_value (node->loc);
3804
3805 set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX,
3806 NO_INSERT);
3807
3808 if (canon_value_cmp (node->loc, val))
3809 {
3810 /* If it could have been a local minimum, it's not any more,
3811 		     since it's now a neighbor of cval, so it may have to push
3812 to it. Conversely, if it wouldn't have prevailed over
3813 val, then whatever mark it has is fine: if it was to
3814 push, it will now push to a more canonical node, but if
3815 it wasn't, then it has already pushed any values it might
3816 have to. */
3817 VALUE_RECURSED_INTO (node->loc) = true;
3818 		  /* Make sure we visit node->loc by ensuring that cval is
3819 visited too. */
3820 VALUE_RECURSED_INTO (cval) = true;
3821 }
3822 else if (!VALUE_RECURSED_INTO (node->loc))
3823 /* If we have no need to "recurse" into this node, it's
3824 already "canonicalized", so drop the link to the old
3825 parent. */
3826 clobber_variable_part (set, cval, ndv, 0, NULL);
3827 }
3828 else if (GET_CODE (node->loc) == REG)
3829 {
3830 attrs list = set->regs[REGNO (node->loc)], *listp;
3831
3832 /* Change an existing attribute referring to dv so that it
3833 refers to cdv, removing any duplicate this might
3834 introduce, and checking that no previous duplicates
3835 existed, all in a single pass. */
3836
3837 while (list)
3838 {
3839 if (list->offset == 0
3840 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3841 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3842 break;
3843
3844 list = list->next;
3845 }
3846
3847 gcc_assert (list);
3848 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3849 {
3850 list->dv = cdv;
3851 for (listp = &list->next; (list = *listp); listp = &list->next)
3852 {
3853 if (list->offset)
3854 continue;
3855
3856 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3857 {
3858 *listp = list->next;
3859 delete list;
3860 list = *listp;
3861 break;
3862 }
3863
3864 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv));
3865 }
3866 }
3867 else if (dv_as_opaque (list->dv) == dv_as_opaque (cdv))
3868 {
3869 for (listp = &list->next; (list = *listp); listp = &list->next)
3870 {
3871 if (list->offset)
3872 continue;
3873
3874 if (dv_as_opaque (list->dv) == dv_as_opaque (dv))
3875 {
3876 *listp = list->next;
3877 delete list;
3878 list = *listp;
3879 break;
3880 }
3881
3882 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv));
3883 }
3884 }
3885 else
3886 gcc_unreachable ();
3887
3888 #if ENABLE_CHECKING
3889 while (list)
3890 {
3891 if (list->offset == 0
3892 && (dv_as_opaque (list->dv) == dv_as_opaque (dv)
3893 || dv_as_opaque (list->dv) == dv_as_opaque (cdv)))
3894 gcc_unreachable ();
3895
3896 list = list->next;
3897 }
3898 #endif
3899 }
3900 }
3901
3902 if (val)
3903 set_slot_part (set, val, cslot, cdv, 0,
3904 VAR_INIT_STATUS_INITIALIZED, NULL_RTX);
3905
3906 slot = clobber_slot_part (set, cval, slot, 0, NULL);
3907
3908 /* Variable may have been unshared. */
3909 var = *slot;
3910 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval
3911 && var->var_part[0].loc_chain->next == NULL);
3912
3913 if (VALUE_RECURSED_INTO (cval))
3914 goto restart_with_cval;
3915
3916 return 1;
3917 }
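
/* For illustration (v1, v2, v3, reg1 and mem1 below are made-up
   names): if VALUEs v1, v2 and v3 are known to be equal, with v1 the
   most canonical under canon_value_cmp, and the table contains

     v2: (v3 reg1)    v3: (v1 mem1)

   then repeated application of canonicalize_values_star converges,
   roughly, on the star-shaped form

     v1: (v2 v3 reg1 mem1)    v2: (v1)    v3: (v1)

   i.e. all locations are pushed to the canonical v1 and the other
   VALUEs keep a single link back to it.  */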
3918
3919 /* Bind one-part variables to the canonical value in an equivalence
3920 set. Not doing this causes dataflow convergence failure in rare
3921 circumstances, see PR42873. Unfortunately we can't do this
3922 efficiently as part of canonicalize_values_star, since we may not
3923 have determined or even seen the canonical value of a set when we
3924 get to a variable that references another member of the set. */
3925
3926 int
3927 canonicalize_vars_star (variable_def **slot, dataflow_set *set)
3928 {
3929 variable var = *slot;
3930 decl_or_value dv = var->dv;
3931 location_chain node;
3932 rtx cval;
3933 decl_or_value cdv;
3934 variable_def **cslot;
3935 variable cvar;
3936 location_chain cnode;
3937
3938 if (!var->onepart || var->onepart == ONEPART_VALUE)
3939 return 1;
3940
3941 gcc_assert (var->n_var_parts == 1);
3942
3943 node = var->var_part[0].loc_chain;
3944
3945 if (GET_CODE (node->loc) != VALUE)
3946 return 1;
3947
3948 gcc_assert (!node->next);
3949 cval = node->loc;
3950
3951 /* Push values to the canonical one. */
3952 cdv = dv_from_value (cval);
3953 cslot = shared_hash_find_slot_noinsert (set->vars, cdv);
3954 if (!cslot)
3955 return 1;
3956 cvar = *cslot;
3957 gcc_assert (cvar->n_var_parts == 1);
3958
3959 cnode = cvar->var_part[0].loc_chain;
3960
3961 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs
3962 that are not "more canonical" than it. */
3963 if (GET_CODE (cnode->loc) != VALUE
3964 || !canon_value_cmp (cnode->loc, cval))
3965 return 1;
3966
3967 /* CVAL was found to be non-canonical. Change the variable to point
3968 to the canonical VALUE. */
3969 gcc_assert (!cnode->next);
3970 cval = cnode->loc;
3971
3972 slot = set_slot_part (set, cval, slot, dv, 0,
3973 node->init, node->set_src);
3974 clobber_slot_part (set, cval, slot, 0, node->set_src);
3975
3976 return 1;
3977 }
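
/* For example (D, v1 and v2 being illustrative names), if decl D has
   the single location v2 while v2's own location list starts with the
   more canonical v1, the function above rebinds D directly to v1, so
   D no longer depends on a non-canonical member of the equivalence
   set.  */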
3978
3979 /* Combine variable or value in *S1SLOT (in DSM->cur) with the
3980 corresponding entry in DSM->src. Multi-part variables are combined
3981 with variable_union, whereas onepart dvs are combined with
3982 intersection. */
3983
3984 static int
3985 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm)
3986 {
3987 dataflow_set *dst = dsm->dst;
3988 variable_def **dstslot;
3989 variable s2var, dvar = NULL;
3990 decl_or_value dv = s1var->dv;
3991 onepart_enum_t onepart = s1var->onepart;
3992 rtx val;
3993 hashval_t dvhash;
3994 location_chain node, *nodep;
3995
3996 /* If the incoming onepart variable has an empty location list, then
3997 the intersection will be just as empty. For other variables,
3998 it's always union. */
3999 gcc_checking_assert (s1var->n_var_parts
4000 && s1var->var_part[0].loc_chain);
4001
4002 if (!onepart)
4003 return variable_union (s1var, dst);
4004
4005 gcc_checking_assert (s1var->n_var_parts == 1);
4006
4007 dvhash = dv_htab_hash (dv);
4008 if (dv_is_value_p (dv))
4009 val = dv_as_value (dv);
4010 else
4011 val = NULL;
4012
4013 s2var = shared_hash_find_1 (dsm->src->vars, dv, dvhash);
4014 if (!s2var)
4015 {
4016 dst_can_be_shared = false;
4017 return 1;
4018 }
4019
4020 dsm->src_onepart_cnt--;
4021 gcc_assert (s2var->var_part[0].loc_chain
4022 && s2var->onepart == onepart
4023 && s2var->n_var_parts == 1);
4024
4025 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4026 if (dstslot)
4027 {
4028 dvar = *dstslot;
4029 gcc_assert (dvar->refcount == 1
4030 && dvar->onepart == onepart
4031 && dvar->n_var_parts == 1);
4032 nodep = &dvar->var_part[0].loc_chain;
4033 }
4034 else
4035 {
4036 nodep = &node;
4037 node = NULL;
4038 }
4039
4040 if (!dstslot && !onepart_variable_different_p (s1var, s2var))
4041 {
4042 dstslot = shared_hash_find_slot_unshare_1 (&dst->vars, dv,
4043 dvhash, INSERT);
4044 *dstslot = dvar = s2var;
4045 dvar->refcount++;
4046 }
4047 else
4048 {
4049 dst_can_be_shared = false;
4050
4051 intersect_loc_chains (val, nodep, dsm,
4052 s1var->var_part[0].loc_chain, s2var);
4053
4054 if (!dstslot)
4055 {
4056 if (node)
4057 {
4058 dvar = onepart_pool (onepart).allocate ();
4059 dvar->dv = dv;
4060 dvar->refcount = 1;
4061 dvar->n_var_parts = 1;
4062 dvar->onepart = onepart;
4063 dvar->in_changed_variables = false;
4064 dvar->var_part[0].loc_chain = node;
4065 dvar->var_part[0].cur_loc = NULL;
4066 if (onepart)
4067 VAR_LOC_1PAUX (dvar) = NULL;
4068 else
4069 VAR_PART_OFFSET (dvar, 0) = 0;
4070
4071 dstslot
4072 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash,
4073 INSERT);
4074 gcc_assert (!*dstslot);
4075 *dstslot = dvar;
4076 }
4077 else
4078 return 1;
4079 }
4080 }
4081
4082 nodep = &dvar->var_part[0].loc_chain;
4083 while ((node = *nodep))
4084 {
4085 location_chain *nextp = &node->next;
4086
4087 if (GET_CODE (node->loc) == REG)
4088 {
4089 attrs list;
4090
4091 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
4092 if (GET_MODE (node->loc) == GET_MODE (list->loc)
4093 && dv_is_value_p (list->dv))
4094 break;
4095
4096 if (!list)
4097 attrs_list_insert (&dst->regs[REGNO (node->loc)],
4098 dv, 0, node->loc);
4099 /* If this value became canonical for another value that had
4100 this register, we want to leave it alone. */
4101 else if (dv_as_value (list->dv) != val)
4102 {
4103 dstslot = set_slot_part (dst, dv_as_value (list->dv),
4104 dstslot, dv, 0,
4105 node->init, NULL_RTX);
4106 dstslot = delete_slot_part (dst, node->loc, dstslot, 0);
4107
4108 /* Since nextp points into the removed node, we can't
4109 use it. The pointer to the next node moved to nodep.
4110 However, if the variable we're walking is unshared
4111 during our walk, we'll keep walking the location list
4112 of the previously-shared variable, in which case the
4113 node won't have been removed, and we'll want to skip
4114 it. That's why we test *nodep here. */
4115 if (*nodep != node)
4116 nextp = nodep;
4117 }
4118 }
4119 else
4120 /* Canonicalization puts registers first, so we don't have to
4121 walk it all. */
4122 break;
4123 nodep = nextp;
4124 }
4125
4126 if (dvar != *dstslot)
4127 dvar = *dstslot;
4128 nodep = &dvar->var_part[0].loc_chain;
4129
4130 if (val)
4131 {
4132 /* Mark all referenced nodes for canonicalization, and make sure
4133 we have mutual equivalence links. */
4134 VALUE_RECURSED_INTO (val) = true;
4135 for (node = *nodep; node; node = node->next)
4136 if (GET_CODE (node->loc) == VALUE)
4137 {
4138 VALUE_RECURSED_INTO (node->loc) = true;
4139 set_variable_part (dst, val, dv_from_value (node->loc), 0,
4140 node->init, NULL, INSERT);
4141 }
4142
4143 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4144 gcc_assert (*dstslot == dvar);
4145 canonicalize_values_star (dstslot, dst);
4146 gcc_checking_assert (dstslot
4147 == shared_hash_find_slot_noinsert_1 (dst->vars,
4148 dv, dvhash));
4149 dvar = *dstslot;
4150 }
4151 else
4152 {
4153 bool has_value = false, has_other = false;
4154
4155 /* If we have one value and anything else, we're going to
4156 canonicalize this, so make sure all values have an entry in
4157 the table and are marked for canonicalization. */
4158 for (node = *nodep; node; node = node->next)
4159 {
4160 if (GET_CODE (node->loc) == VALUE)
4161 {
4162 /* If this was marked during register canonicalization,
4163 we know we have to canonicalize values. */
4164 if (has_value)
4165 has_other = true;
4166 has_value = true;
4167 if (has_other)
4168 break;
4169 }
4170 else
4171 {
4172 has_other = true;
4173 if (has_value)
4174 break;
4175 }
4176 }
4177
4178 if (has_value && has_other)
4179 {
4180 for (node = *nodep; node; node = node->next)
4181 {
4182 if (GET_CODE (node->loc) == VALUE)
4183 {
4184 decl_or_value dv = dv_from_value (node->loc);
4185 variable_def **slot = NULL;
4186
4187 if (shared_hash_shared (dst->vars))
4188 slot = shared_hash_find_slot_noinsert (dst->vars, dv);
4189 if (!slot)
4190 slot = shared_hash_find_slot_unshare (&dst->vars, dv,
4191 INSERT);
4192 if (!*slot)
4193 {
4194 variable var = onepart_pool (ONEPART_VALUE).allocate ();
4195 var->dv = dv;
4196 var->refcount = 1;
4197 var->n_var_parts = 1;
4198 var->onepart = ONEPART_VALUE;
4199 var->in_changed_variables = false;
4200 var->var_part[0].loc_chain = NULL;
4201 var->var_part[0].cur_loc = NULL;
4202 VAR_LOC_1PAUX (var) = NULL;
4203 *slot = var;
4204 }
4205
4206 VALUE_RECURSED_INTO (node->loc) = true;
4207 }
4208 }
4209
4210 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash);
4211 gcc_assert (*dstslot == dvar);
4212 canonicalize_values_star (dstslot, dst);
4213 gcc_checking_assert (dstslot
4214 == shared_hash_find_slot_noinsert_1 (dst->vars,
4215 dv, dvhash));
4216 dvar = *dstslot;
4217 }
4218 }
4219
4220 if (!onepart_variable_different_p (dvar, s2var))
4221 {
4222 variable_htab_free (dvar);
4223 *dstslot = dvar = s2var;
4224 dvar->refcount++;
4225 }
4226 else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
4227 {
4228 variable_htab_free (dvar);
4229 *dstslot = dvar = s1var;
4230 dvar->refcount++;
4231 dst_can_be_shared = false;
4232 }
4233 else
4234 dst_can_be_shared = false;
4235
4236 return 1;
4237 }
4238
4239 /* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
4240 multi-part variable. Unions of multi-part variables and
4241 intersections of one-part ones will be handled in
4242 variable_merge_over_cur(). */
4243
4244 static int
4245 variable_merge_over_src (variable s2var, struct dfset_merge *dsm)
4246 {
4247 dataflow_set *dst = dsm->dst;
4248 decl_or_value dv = s2var->dv;
4249
4250 if (!s2var->onepart)
4251 {
4252 variable_def **dstp = shared_hash_find_slot (dst->vars, dv);
4253 *dstp = s2var;
4254 s2var->refcount++;
4255 return 1;
4256 }
4257
4258 dsm->src_onepart_cnt++;
4259 return 1;
4260 }
4261
4262 /* Combine dataflow set information from SRC2 into DST, merging in
4263 the previous contents of DST as the other operand. */
4264
4265 static void
4266 dataflow_set_merge (dataflow_set *dst, dataflow_set *src2)
4267 {
4268 dataflow_set cur = *dst;
4269 dataflow_set *src1 = &cur;
4270 struct dfset_merge dsm;
4271 int i;
4272 size_t src1_elems, src2_elems;
4273 variable_iterator_type hi;
4274 variable var;
4275
4276 src1_elems = shared_hash_htab (src1->vars)->elements ();
4277 src2_elems = shared_hash_htab (src2->vars)->elements ();
4278 dataflow_set_init (dst);
4279 dst->stack_adjust = cur.stack_adjust;
4280 shared_hash_destroy (dst->vars);
4281 dst->vars = new shared_hash_def;
4282 dst->vars->refcount = 1;
4283 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems));
4284
4285 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4286 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]);
4287
4288 dsm.dst = dst;
4289 dsm.src = src2;
4290 dsm.cur = src1;
4291 dsm.src_onepart_cnt = 0;
4292
4293 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars),
4294 var, variable, hi)
4295 variable_merge_over_src (var, &dsm);
4296 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars),
4297 var, variable, hi)
4298 variable_merge_over_cur (var, &dsm);
4299
4300 if (dsm.src_onepart_cnt)
4301 dst_can_be_shared = false;
4302
4303 dataflow_set_destroy (src1);
4304 }
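
/* A sketch of the merge semantics at a CFG join (X, reg1, reg2 and
   mem1 are made-up names): if one predecessor has one-part variable X
   located at {reg1, mem1} and the other at {reg1, reg2}, the merged
   set keeps only the intersection {reg1}, since only locations valid
   on every incoming path may be reported.  A multi-part variable is
   instead unioned, part by part.  */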
4305
4306 /* Mark register equivalences. */
4307
4308 static void
4309 dataflow_set_equiv_regs (dataflow_set *set)
4310 {
4311 int i;
4312 attrs list, *listp;
4313
4314 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4315 {
4316 rtx canon[NUM_MACHINE_MODES];
4317
4318 /* If the list is empty or has a single entry, there is no need
4319 to canonicalize anything. */
4320 if (set->regs[i] == NULL || set->regs[i]->next == NULL)
4321 continue;
4322
4323 memset (canon, 0, sizeof (canon));
4324
4325 for (list = set->regs[i]; list; list = list->next)
4326 if (list->offset == 0 && dv_is_value_p (list->dv))
4327 {
4328 rtx val = dv_as_value (list->dv);
4329 rtx *cvalp = &canon[(int)GET_MODE (val)];
4330 rtx cval = *cvalp;
4331
4332 if (canon_value_cmp (val, cval))
4333 *cvalp = val;
4334 }
4335
4336 for (list = set->regs[i]; list; list = list->next)
4337 if (list->offset == 0 && dv_onepart_p (list->dv))
4338 {
4339 rtx cval = canon[(int)GET_MODE (list->loc)];
4340
4341 if (!cval)
4342 continue;
4343
4344 if (dv_is_value_p (list->dv))
4345 {
4346 rtx val = dv_as_value (list->dv);
4347
4348 if (val == cval)
4349 continue;
4350
4351 VALUE_RECURSED_INTO (val) = true;
4352 set_variable_part (set, val, dv_from_value (cval), 0,
4353 VAR_INIT_STATUS_INITIALIZED,
4354 NULL, NO_INSERT);
4355 }
4356
4357 VALUE_RECURSED_INTO (cval) = true;
4358 set_variable_part (set, cval, list->dv, 0,
4359 VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
4360 }
4361
4362 for (listp = &set->regs[i]; (list = *listp);
4363 listp = list ? &list->next : listp)
4364 if (list->offset == 0 && dv_onepart_p (list->dv))
4365 {
4366 rtx cval = canon[(int)GET_MODE (list->loc)];
4367 variable_def **slot;
4368
4369 if (!cval)
4370 continue;
4371
4372 if (dv_is_value_p (list->dv))
4373 {
4374 rtx val = dv_as_value (list->dv);
4375 if (!VALUE_RECURSED_INTO (val))
4376 continue;
4377 }
4378
4379 slot = shared_hash_find_slot_noinsert (set->vars, list->dv);
4380 canonicalize_values_star (slot, set);
4381 if (*listp != list)
4382 list = NULL;
4383 }
4384 }
4385 }
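
/* For instance (r1, vA and vB being illustrative names), if register
   r1 holds both SImode VALUEs vA and vB, and vA is the more canonical
   one, the loops above record the mutual equivalence of vA and vB and
   then run canonicalize_values_star on them, leaving a single
   canonical VALUE for r1 in the set.  */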
4386
4387 /* Remove any redundant values in the location list of VAR, which must
4388 be unshared and 1-part. */
4389
4390 static void
4391 remove_duplicate_values (variable var)
4392 {
4393 location_chain node, *nodep;
4394
4395 gcc_assert (var->onepart);
4396 gcc_assert (var->n_var_parts == 1);
4397 gcc_assert (var->refcount == 1);
4398
4399 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); )
4400 {
4401 if (GET_CODE (node->loc) == VALUE)
4402 {
4403 if (VALUE_RECURSED_INTO (node->loc))
4404 {
4405 /* Remove duplicate value node. */
4406 *nodep = node->next;
4407 delete node;
4408 continue;
4409 }
4410 else
4411 VALUE_RECURSED_INTO (node->loc) = true;
4412 }
4413 nodep = &node->next;
4414 }
4415
4416 for (node = var->var_part[0].loc_chain; node; node = node->next)
4417 if (GET_CODE (node->loc) == VALUE)
4418 {
4419 gcc_assert (VALUE_RECURSED_INTO (node->loc));
4420 VALUE_RECURSED_INTO (node->loc) = false;
4421 }
4422 }
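
/* The two passes above work because the mark lives on the shared
   VALUE rtx rather than on the chain node, so a second node naming
   the same VALUE finds the mark already set.  A minimal standalone
   sketch of the same technique, using hypothetical types (VAL and
   NODE below are not vartrack types):

     #include <stdbool.h>
     #include <stdlib.h>

     struct val { bool marked; };
     struct node { struct val *v; struct node *next; };

     static void
     dedup (struct node **headp)
     {
       struct node **np, *n;

       // Pass 1: unlink nodes whose referenced val is already marked.
       for (np = headp; (n = *np); )
         if (n->v->marked)
           {
             *np = n->next;
             free (n);
           }
         else
           {
             n->v->marked = true;
             np = &n->next;
           }

       // Pass 2: clear the marks so they can be reused later.
       for (n = *headp; n; n = n->next)
         n->v->marked = false;
     }
*/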
4423
4424
4425 /* Hash table iteration argument passed to variable_post_merge. */
4426 struct dfset_post_merge
4427 {
4428 /* The new input set for the current block. */
4429 dataflow_set *set;
4430 /* Pointer to the permanent input set for the current block, or
4431 NULL. */
4432 dataflow_set **permp;
4433 };
4434
4435 /* Create values for incoming expressions associated with one-part
4436 variables that don't have value numbers for them. */
4437
4438 int
4439 variable_post_merge_new_vals (variable_def **slot, dfset_post_merge *dfpm)
4440 {
4441 dataflow_set *set = dfpm->set;
4442 variable var = *slot;
4443 location_chain node;
4444
4445 if (!var->onepart || !var->n_var_parts)
4446 return 1;
4447
4448 gcc_assert (var->n_var_parts == 1);
4449
4450 if (dv_is_decl_p (var->dv))
4451 {
4452 bool check_dupes = false;
4453
4454 restart:
4455 for (node = var->var_part[0].loc_chain; node; node = node->next)
4456 {
4457 if (GET_CODE (node->loc) == VALUE)
4458 gcc_assert (!VALUE_RECURSED_INTO (node->loc));
4459 else if (GET_CODE (node->loc) == REG)
4460 {
4461 attrs att, *attp, *curp = NULL;
4462
4463 if (var->refcount != 1)
4464 {
4465 slot = unshare_variable (set, slot, var,
4466 VAR_INIT_STATUS_INITIALIZED);
4467 var = *slot;
4468 goto restart;
4469 }
4470
4471 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
4472 attp = &att->next)
4473 if (att->offset == 0
4474 && GET_MODE (att->loc) == GET_MODE (node->loc))
4475 {
4476 if (dv_is_value_p (att->dv))
4477 {
4478 rtx cval = dv_as_value (att->dv);
4479 node->loc = cval;
4480 check_dupes = true;
4481 break;
4482 }
4483 else if (dv_as_opaque (att->dv) == dv_as_opaque (var->dv))
4484 curp = attp;
4485 }
4486
4487 if (!curp)
4488 {
4489 curp = attp;
4490 while (*curp)
4491 if ((*curp)->offset == 0
4492 && GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
4493 && dv_as_opaque ((*curp)->dv) == dv_as_opaque (var->dv))
4494 break;
4495 else
4496 curp = &(*curp)->next;
4497 gcc_assert (*curp);
4498 }
4499
4500 if (!att)
4501 {
4502 decl_or_value cdv;
4503 rtx cval;
4504
4505 if (!*dfpm->permp)
4506 {
4507 *dfpm->permp = XNEW (dataflow_set);
4508 dataflow_set_init (*dfpm->permp);
4509 }
4510
4511 for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
4512 att; att = att->next)
4513 if (GET_MODE (att->loc) == GET_MODE (node->loc))
4514 {
4515 gcc_assert (att->offset == 0
4516 && dv_is_value_p (att->dv));
4517 val_reset (set, att->dv);
4518 break;
4519 }
4520
4521 if (att)
4522 {
4523 cdv = att->dv;
4524 cval = dv_as_value (cdv);
4525 }
4526 else
4527 {
4528 /* Create a unique value to hold this register,
4529 that ought to be found and reused in
4530 subsequent rounds. */
4531 cselib_val *v;
4532 gcc_assert (!cselib_lookup (node->loc,
4533 GET_MODE (node->loc), 0,
4534 VOIDmode));
4535 v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
4536 VOIDmode);
4537 cselib_preserve_value (v);
4538 cselib_invalidate_rtx (node->loc);
4539 cval = v->val_rtx;
4540 cdv = dv_from_value (cval);
4541 if (dump_file)
4542 fprintf (dump_file,
4543 "Created new value %u:%u for reg %i\n",
4544 v->uid, v->hash, REGNO (node->loc));
4545 }
4546
4547 var_reg_decl_set (*dfpm->permp, node->loc,
4548 VAR_INIT_STATUS_INITIALIZED,
4549 cdv, 0, NULL, INSERT);
4550
4551 node->loc = cval;
4552 check_dupes = true;
4553 }
4554
4555 /* Remove the attribute referring to the decl; the decl now
4556 uses the value for the register, which either already exists
4557 or will be added when we bring the permanent set in. */
4558 att = *curp;
4559 *curp = att->next;
4560 delete att;
4561 }
4562 }
4563
4564 if (check_dupes)
4565 remove_duplicate_values (var);
4566 }
4567
4568 return 1;
4569 }
4570
4571 /* Reset values in the permanent set that are not associated with the
4572 chosen expression. */
4573
4574 int
4575 variable_post_merge_perm_vals (variable_def **pslot, dfset_post_merge *dfpm)
4576 {
4577 dataflow_set *set = dfpm->set;
4578 variable pvar = *pslot, var;
4579 location_chain pnode;
4580 decl_or_value dv;
4581 attrs att;
4582
4583 gcc_assert (dv_is_value_p (pvar->dv)
4584 && pvar->n_var_parts == 1);
4585 pnode = pvar->var_part[0].loc_chain;
4586 gcc_assert (pnode
4587 && !pnode->next
4588 && REG_P (pnode->loc));
4589
4590 dv = pvar->dv;
4591
4592 var = shared_hash_find (set->vars, dv);
4593 if (var)
4594 {
4595 /* Although variable_post_merge_new_vals may have made decls
4596 non-star-canonical, values that pre-existed in canonical form
4597 remain canonical, and newly-created values reference a single
4598 REG, so they are canonical as well. Since VAR has the
4599 location list for a VALUE, using find_loc_in_1pdv for it is
4600 fine, since VALUEs don't map back to DECLs. */
4601 if (find_loc_in_1pdv (pnode->loc, var, shared_hash_htab (set->vars)))
4602 return 1;
4603 val_reset (set, dv);
4604 }
4605
4606 for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
4607 if (att->offset == 0
4608 && GET_MODE (att->loc) == GET_MODE (pnode->loc)
4609 && dv_is_value_p (att->dv))
4610 break;
4611
4612 /* If there is a value associated with this register already, create
4613 an equivalence. */
4614 if (att && dv_as_value (att->dv) != dv_as_value (dv))
4615 {
4616 rtx cval = dv_as_value (att->dv);
4617 set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
4618 set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
4619 NULL, INSERT);
4620 }
4621 else if (!att)
4622 {
4623 attrs_list_insert (&set->regs[REGNO (pnode->loc)],
4624 dv, 0, pnode->loc);
4625 variable_union (pvar, set);
4626 }
4627
4628 return 1;
4629 }
4630
4631 /* After a merge, create values for new expressions, bring in
4632 values from the permanent set, and canonicalize the result. */
4633
4634 static void
4635 dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp)
4636 {
4637 struct dfset_post_merge dfpm;
4638
4639 dfpm.set = set;
4640 dfpm.permp = permp;
4641
4642 shared_hash_htab (set->vars)
4643 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm);
4644 if (*permp)
4645 shared_hash_htab ((*permp)->vars)
4646 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm);
4647 shared_hash_htab (set->vars)
4648 ->traverse <dataflow_set *, canonicalize_values_star> (set);
4649 shared_hash_htab (set->vars)
4650 ->traverse <dataflow_set *, canonicalize_vars_star> (set);
4651 }
4652
4653 /* Return a node whose loc is a MEM that refers to EXPR in the
4654 location list of a one-part variable or value VAR, or in that of
4655 any values recursively mentioned in the location lists. */
4656
4657 static location_chain
4658 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars)
4659 {
4660 location_chain node;
4661 decl_or_value dv;
4662 variable var;
4663 location_chain where = NULL;
4664
4665 if (!val)
4666 return NULL;
4667
4668 gcc_assert (GET_CODE (val) == VALUE
4669 && !VALUE_RECURSED_INTO (val));
4670
4671 dv = dv_from_value (val);
4672 var = vars->find_with_hash (dv, dv_htab_hash (dv));
4673
4674 if (!var)
4675 return NULL;
4676
4677 gcc_assert (var->onepart);
4678
4679 if (!var->n_var_parts)
4680 return NULL;
4681
4682 VALUE_RECURSED_INTO (val) = true;
4683
4684 for (node = var->var_part[0].loc_chain; node; node = node->next)
4685 if (MEM_P (node->loc)
4686 && MEM_EXPR (node->loc) == expr
4687 && INT_MEM_OFFSET (node->loc) == 0)
4688 {
4689 where = node;
4690 break;
4691 }
4692 else if (GET_CODE (node->loc) == VALUE
4693 && !VALUE_RECURSED_INTO (node->loc)
4694 && (where = find_mem_expr_in_1pdv (expr, node->loc, vars)))
4695 break;
4696
4697 VALUE_RECURSED_INTO (val) = false;
4698
4699 return where;
4700 }
4701
4702 /* Return TRUE if the value of MEM may vary across a call. */
4703
4704 static bool
4705 mem_dies_at_call (rtx mem)
4706 {
4707 tree expr = MEM_EXPR (mem);
4708 tree decl;
4709
4710 if (!expr)
4711 return true;
4712
4713 decl = get_base_address (expr);
4714
4715 if (!decl)
4716 return true;
4717
4718 if (!DECL_P (decl))
4719 return true;
4720
4721 return (may_be_aliased (decl)
4722 || (!TREE_READONLY (decl) && is_global_var (decl)));
4723 }
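
/* E.g. a MEM whose MEM_EXPR is a local variable that can never have
   its address taken survives a call, whereas a MEM for a non-const
   global, or for a local that may be aliased, must be assumed
   clobbered by the call.  */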
4724
4725 /* Remove all MEMs from the location list of a hash table entry for a
4726 one-part variable, except those whose MEM attributes map back to
4727 the variable itself, directly or within a VALUE. */
4728
4729 int
4730 dataflow_set_preserve_mem_locs (variable_def **slot, dataflow_set *set)
4731 {
4732 variable var = *slot;
4733
4734 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
4735 {
4736 tree decl = dv_as_decl (var->dv);
4737 location_chain loc, *locp;
4738 bool changed = false;
4739
4740 if (!var->n_var_parts)
4741 return 1;
4742
4743 gcc_assert (var->n_var_parts == 1);
4744
4745 if (shared_var_p (var, set->vars))
4746 {
4747 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4748 {
4749 /* We want to remove dying MEMs that don't refer to DECL. */
4750 if (GET_CODE (loc->loc) == MEM
4751 && (MEM_EXPR (loc->loc) != decl
4752 || INT_MEM_OFFSET (loc->loc) != 0)
4753 && !mem_dies_at_call (loc->loc))
4754 break;
4755 /* We want to move MEMs that do refer to DECL here. */
4756 else if (GET_CODE (loc->loc) == VALUE
4757 && find_mem_expr_in_1pdv (decl, loc->loc,
4758 shared_hash_htab (set->vars)))
4759 break;
4760 }
4761
4762 if (!loc)
4763 return 1;
4764
4765 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4766 var = *slot;
4767 gcc_assert (var->n_var_parts == 1);
4768 }
4769
4770 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4771 loc; loc = *locp)
4772 {
4773 rtx old_loc = loc->loc;
4774 if (GET_CODE (old_loc) == VALUE)
4775 {
4776 location_chain mem_node
4777 = find_mem_expr_in_1pdv (decl, loc->loc,
4778 shared_hash_htab (set->vars));
4779
4780 /* ??? This picks up only one out of multiple MEMs that
4781 refer to the same variable. Do we ever need to be
4782 concerned about dealing with more than one, or, given
4783 that they should all map to the same variable
4784 location, their addresses will have been merged and
4785 they will be regarded as equivalent? */
4786 if (mem_node)
4787 {
4788 loc->loc = mem_node->loc;
4789 loc->set_src = mem_node->set_src;
4790 loc->init = MIN (loc->init, mem_node->init);
4791 }
4792 }
4793
4794 if (GET_CODE (loc->loc) != MEM
4795 || (MEM_EXPR (loc->loc) == decl
4796 && INT_MEM_OFFSET (loc->loc) == 0)
4797 || !mem_dies_at_call (loc->loc))
4798 {
4799 if (old_loc != loc->loc && emit_notes)
4800 {
4801 if (old_loc == var->var_part[0].cur_loc)
4802 {
4803 changed = true;
4804 var->var_part[0].cur_loc = NULL;
4805 }
4806 }
4807 locp = &loc->next;
4808 continue;
4809 }
4810
4811 if (emit_notes)
4812 {
4813 if (old_loc == var->var_part[0].cur_loc)
4814 {
4815 changed = true;
4816 var->var_part[0].cur_loc = NULL;
4817 }
4818 }
4819 *locp = loc->next;
4820 delete loc;
4821 }
4822
4823 if (!var->var_part[0].loc_chain)
4824 {
4825 var->n_var_parts--;
4826 changed = true;
4827 }
4828 if (changed)
4829 variable_was_changed (var, set);
4830 }
4831
4832 return 1;
4833 }
4834
4835 /* Remove all MEMs from the location list of a hash table entry for a
4836 value. */
4837
4838 int
4839 dataflow_set_remove_mem_locs (variable_def **slot, dataflow_set *set)
4840 {
4841 variable var = *slot;
4842
4843 if (var->onepart == ONEPART_VALUE)
4844 {
4845 location_chain loc, *locp;
4846 bool changed = false;
4847 rtx cur_loc;
4848
4849 gcc_assert (var->n_var_parts == 1);
4850
4851 if (shared_var_p (var, set->vars))
4852 {
4853 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
4854 if (GET_CODE (loc->loc) == MEM
4855 && mem_dies_at_call (loc->loc))
4856 break;
4857
4858 if (!loc)
4859 return 1;
4860
4861 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
4862 var = *slot;
4863 gcc_assert (var->n_var_parts == 1);
4864 }
4865
4866 if (VAR_LOC_1PAUX (var))
4867 cur_loc = VAR_LOC_FROM (var);
4868 else
4869 cur_loc = var->var_part[0].cur_loc;
4870
4871 for (locp = &var->var_part[0].loc_chain, loc = *locp;
4872 loc; loc = *locp)
4873 {
4874 if (GET_CODE (loc->loc) != MEM
4875 || !mem_dies_at_call (loc->loc))
4876 {
4877 locp = &loc->next;
4878 continue;
4879 }
4880
4881 *locp = loc->next;
4882 /* If we have deleted the location which was last emitted
4883 we have to emit a new location, so add the variable to the
4884 set of changed variables. */
4885 if (cur_loc == loc->loc)
4886 {
4887 changed = true;
4888 var->var_part[0].cur_loc = NULL;
4889 if (VAR_LOC_1PAUX (var))
4890 VAR_LOC_FROM (var) = NULL;
4891 }
4892 delete loc;
4893 }
4894
4895 if (!var->var_part[0].loc_chain)
4896 {
4897 var->n_var_parts--;
4898 changed = true;
4899 }
4900 if (changed)
4901 variable_was_changed (var, set);
4902 }
4903
4904 return 1;
4905 }
4906
4907 /* Remove all variable-location information about call-clobbered
4908 registers, as well as associations between MEMs and VALUEs. */
4909
4910 static void
4911 dataflow_set_clear_at_call (dataflow_set *set)
4912 {
4913 unsigned int r;
4914 hard_reg_set_iterator hrsi;
4915
4916 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, r, hrsi)
4917 var_regno_delete (set, r);
4918
4919 if (MAY_HAVE_DEBUG_INSNS)
4920 {
4921 set->traversed_vars = set->vars;
4922 shared_hash_htab (set->vars)
4923 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set);
4924 set->traversed_vars = set->vars;
4925 shared_hash_htab (set->vars)
4926 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set);
4927 set->traversed_vars = NULL;
4928 }
4929 }
4930
4931 static bool
4932 variable_part_different_p (variable_part *vp1, variable_part *vp2)
4933 {
4934 location_chain lc1, lc2;
4935
4936 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next)
4937 {
4938 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next)
4939 {
4940 if (REG_P (lc1->loc) && REG_P (lc2->loc))
4941 {
4942 if (REGNO (lc1->loc) == REGNO (lc2->loc))
4943 break;
4944 }
4945 if (rtx_equal_p (lc1->loc, lc2->loc))
4946 break;
4947 }
4948 if (!lc2)
4949 return true;
4950 }
4951 return false;
4952 }
4953
4954 /* Return true if one-part variables VAR1 and VAR2 are different.
4955 They must be in canonical order. */
4956
4957 static bool
4958 onepart_variable_different_p (variable var1, variable var2)
4959 {
4960 location_chain lc1, lc2;
4961
4962 if (var1 == var2)
4963 return false;
4964
4965 gcc_assert (var1->n_var_parts == 1
4966 && var2->n_var_parts == 1);
4967
4968 lc1 = var1->var_part[0].loc_chain;
4969 lc2 = var2->var_part[0].loc_chain;
4970
4971 gcc_assert (lc1 && lc2);
4972
4973 while (lc1 && lc2)
4974 {
4975 if (loc_cmp (lc1->loc, lc2->loc))
4976 return true;
4977 lc1 = lc1->next;
4978 lc2 = lc2->next;
4979 }
4980
4981 return lc1 != lc2;
4982 }
4983
4984 /* Return true if variables VAR1 and VAR2 are different. */
4985
4986 static bool
4987 variable_different_p (variable var1, variable var2)
4988 {
4989 int i;
4990
4991 if (var1 == var2)
4992 return false;
4993
4994 if (var1->onepart != var2->onepart)
4995 return true;
4996
4997 if (var1->n_var_parts != var2->n_var_parts)
4998 return true;
4999
5000 if (var1->onepart && var1->n_var_parts)
5001 {
5002 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)
5003 && var1->n_var_parts == 1);
5004 /* One-part values have locations in a canonical order. */
5005 return onepart_variable_different_p (var1, var2);
5006 }
5007
5008 for (i = 0; i < var1->n_var_parts; i++)
5009 {
5010 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i))
5011 return true;
5012 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i]))
5013 return true;
5014 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i]))
5015 return true;
5016 }
5017 return false;
5018 }
5019
5020 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */
5021
5022 static bool
5023 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
5024 {
5025 variable_iterator_type hi;
5026 variable var1;
5027
5028 if (old_set->vars == new_set->vars)
5029 return false;
5030
5031 if (shared_hash_htab (old_set->vars)->elements ()
5032 != shared_hash_htab (new_set->vars)->elements ())
5033 return true;
5034
5035 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
5036 var1, variable, hi)
5037 {
5038 variable_table_type *htab = shared_hash_htab (new_set->vars);
5039 variable var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv));
5040 if (!var2)
5041 {
5042 if (dump_file && (dump_flags & TDF_DETAILS))
5043 {
5044 fprintf (dump_file, "dataflow difference found: removal of:\n");
5045 dump_var (var1);
5046 }
5047 return true;
5048 }
5049
5050 if (variable_different_p (var1, var2))
5051 {
5052 if (dump_file && (dump_flags & TDF_DETAILS))
5053 {
5054 fprintf (dump_file, "dataflow difference found: "
5055 "old and new follow:\n");
5056 dump_var (var1);
5057 dump_var (var2);
5058 }
5059 return true;
5060 }
5061 }
5062
5063 /* No need to traverse the second hashtab: if both have the same
5064 number of elements and every entry in the first was found in the
5065 second, the second cannot have any extra entries. */
5066 return false;
5067 }
5068
5069 /* Free the contents of dataflow set SET. */
5070
5071 static void
5072 dataflow_set_destroy (dataflow_set *set)
5073 {
5074 int i;
5075
5076 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5077 attrs_list_clear (&set->regs[i]);
5078
5079 shared_hash_destroy (set->vars);
5080 set->vars = NULL;
5081 }
5082
5083 /* Return true if RTL X contains a SYMBOL_REF. */
5084
5085 static bool
5086 contains_symbol_ref (rtx x)
5087 {
5088 const char *fmt;
5089 RTX_CODE code;
5090 int i;
5091
5092 if (!x)
5093 return false;
5094
5095 code = GET_CODE (x);
5096 if (code == SYMBOL_REF)
5097 return true;
5098
5099 fmt = GET_RTX_FORMAT (code);
5100 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5101 {
5102 if (fmt[i] == 'e')
5103 {
5104 if (contains_symbol_ref (XEXP (x, i)))
5105 return true;
5106 }
5107 else if (fmt[i] == 'E')
5108 {
5109 int j;
5110 for (j = 0; j < XVECLEN (x, i); j++)
5111 if (contains_symbol_ref (XVECEXP (x, i, j)))
5112 return true;
5113 }
5114 }
5115
5116 return false;
5117 }
5118
5119 /* Shall EXPR be tracked? */
5120
5121 static bool
5122 track_expr_p (tree expr, bool need_rtl)
5123 {
5124 rtx decl_rtl;
5125 tree realdecl;
5126
5127 if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
5128 return DECL_RTL_SET_P (expr);
5129
5130 /* If EXPR is not a parameter or a variable do not track it. */
5131 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL)
5132 return 0;
5133
5134 /* It also must have a name... */
5135 if (!DECL_NAME (expr) && need_rtl)
5136 return 0;
5137
5138 /* ... and a RTL assigned to it. */
5139 decl_rtl = DECL_RTL_IF_SET (expr);
5140 if (!decl_rtl && need_rtl)
5141 return 0;
5142
5143 /* If this expression is really a debug alias of some other declaration, we
5144 don't need to track this expression if the ultimate declaration is
5145 ignored. */
5146 realdecl = expr;
5147 if (TREE_CODE (realdecl) == VAR_DECL && DECL_HAS_DEBUG_EXPR_P (realdecl))
5148 {
5149 realdecl = DECL_DEBUG_EXPR (realdecl);
5150 if (!DECL_P (realdecl))
5151 {
5152 if (handled_component_p (realdecl)
5153 || (TREE_CODE (realdecl) == MEM_REF
5154 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
5155 {
5156 HOST_WIDE_INT bitsize, bitpos, maxsize;
5157 tree innerdecl
5158 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize,
5159 &maxsize);
5160 if (!DECL_P (innerdecl)
5161 || DECL_IGNORED_P (innerdecl)
5162 /* Do not track declarations for parts of tracked parameters
5163 since we want to track them as a whole instead. */
5164 || (TREE_CODE (innerdecl) == PARM_DECL
5165 && DECL_MODE (innerdecl) != BLKmode
5166 && TREE_CODE (TREE_TYPE (innerdecl)) != UNION_TYPE)
5167 || TREE_STATIC (innerdecl)
5168 || bitsize <= 0
5169 || bitpos + bitsize > 256
5170 || bitsize != maxsize)
5171 return 0;
5172 else
5173 realdecl = expr;
5174 }
5175 else
5176 return 0;
5177 }
5178 }
5179
5180 /* Do not track EXPR if REALDECL should be ignored for
5181 debugging purposes. */
5182 if (DECL_IGNORED_P (realdecl))
5183 return 0;
5184
5185 /* Do not track global variables until we are able to emit correct location
5186 list for them. */
5187 if (TREE_STATIC (realdecl))
5188 return 0;
5189
5190 /* When EXPR is a DECL for an alias of some variable (see example)
5191 the TREE_STATIC flag is not used. Disable tracking of all DECLs
5192 whose DECL_RTL contains a SYMBOL_REF.
5193
5194 Example:
5195 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
5196 char **_dl_argv;
5197 */
5198 if (decl_rtl && MEM_P (decl_rtl)
5199 && contains_symbol_ref (XEXP (decl_rtl, 0)))
5200 return 0;
5201
5202 /* If RTX is a memory it should not be very large (because it would be
5203 an array or struct). */
5204 if (decl_rtl && MEM_P (decl_rtl))
5205 {
5206 /* Do not track structures and arrays. */
5207 if (GET_MODE (decl_rtl) == BLKmode
5208 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
5209 return 0;
5210 if (MEM_SIZE_KNOWN_P (decl_rtl)
5211 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS)
5212 return 0;
5213 }
5214
5215 DECL_CHANGED (expr) = 0;
5216 DECL_CHANGED (realdecl) = 0;
5217 return 1;
5218 }
5219
5220 /* Determine whether a given LOC refers to the same variable part as
5221 EXPR+OFFSET. */
5222
5223 static bool
5224 same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset)
5225 {
5226 tree expr2;
5227 HOST_WIDE_INT offset2;
5228
5229 if (! DECL_P (expr))
5230 return false;
5231
5232 if (REG_P (loc))
5233 {
5234 expr2 = REG_EXPR (loc);
5235 offset2 = REG_OFFSET (loc);
5236 }
5237 else if (MEM_P (loc))
5238 {
5239 expr2 = MEM_EXPR (loc);
5240 offset2 = INT_MEM_OFFSET (loc);
5241 }
5242 else
5243 return false;
5244
5245 if (! expr2 || ! DECL_P (expr2))
5246 return false;
5247
5248 expr = var_debug_decl (expr);
5249 expr2 = var_debug_decl (expr2);
5250
5251 return (expr == expr2 && offset == offset2);
5252 }
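
/* For example (with X an illustrative decl), a REG whose REG_EXPR is
   X at REG_OFFSET 4 and a MEM whose MEM_EXPR is X at offset 4 name
   the same variable part, even though the locations themselves
   differ.  */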
5253
5254 /* LOC is a REG or MEM that we would like to track if possible.
5255 If EXPR is null, we don't know what expression LOC refers to,
5256 otherwise it refers to EXPR + OFFSET. STORE_REG_P is true if
5257 LOC is an lvalue register.
5258
5259 Return true if EXPR is nonnull and if LOC, or some lowpart of it,
5260 is something we can track. When returning true, store the mode of
5261 the lowpart we can track in *MODE_OUT (if nonnull) and its offset
5262 from EXPR in *OFFSET_OUT (if nonnull). */
5263
5264 static bool
5265 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p,
5266 machine_mode *mode_out, HOST_WIDE_INT *offset_out)
5267 {
5268 machine_mode mode;
5269
5270 if (expr == NULL || !track_expr_p (expr, true))
5271 return false;
5272
5273 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
5274 whole subreg, but only the old inner part is really relevant. */
5275 mode = GET_MODE (loc);
5276 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
5277 {
5278 machine_mode pseudo_mode;
5279
5280 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
5281 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode))
5282 {
5283 offset += byte_lowpart_offset (pseudo_mode, mode);
5284 mode = pseudo_mode;
5285 }
5286 }
5287
5288 /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
5289 Do the same if we are storing to a register and EXPR occupies
5290 the whole of register LOC; in that case, the whole of EXPR is
5291 being changed. We exclude complex modes from the second case
5292 because the real and imaginary parts are represented as separate
5293 pseudo registers, even if the whole complex value fits into one
5294 hard register. */
5295 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr))
5296 || (store_reg_p
5297 && !COMPLEX_MODE_P (DECL_MODE (expr))
5298 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1))
5299 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0)
5300 {
5301 mode = DECL_MODE (expr);
5302 offset = 0;
5303 }
5304
5305 if (offset < 0 || offset >= MAX_VAR_PARTS)
5306 return false;
5307
5308 if (mode_out)
5309 *mode_out = mode;
5310 if (offset_out)
5311 *offset_out = offset;
5312 return true;
5313 }
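
/* As an illustrative case: if LOC is an SImode register that is
   really a paradoxical lowpart of a QImode pseudo, only the QImode
   part is relevant, so the tracked mode is narrowed to QImode and
   OFFSET adjusted by byte_lowpart_offset; conversely, a store to a
   register wholly occupied by EXPR is widened to DECL_MODE (EXPR)
   at offset 0.  */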
5314
5315 /* Return the MODE lowpart of LOC, or null if LOC is not something we
5316 want to track. When returning nonnull, make sure that the attributes
5317 on the returned value are updated. */
5318
5319 static rtx
5320 var_lowpart (machine_mode mode, rtx loc)
5321 {
5322 unsigned int offset, reg_offset, regno;
5323
5324 if (GET_MODE (loc) == mode)
5325 return loc;
5326
5327 if (!REG_P (loc) && !MEM_P (loc))
5328 return NULL;
5329
5330 offset = byte_lowpart_offset (mode, GET_MODE (loc));
5331
5332 if (MEM_P (loc))
5333 return adjust_address_nv (loc, mode, offset);
5334
5335 reg_offset = subreg_lowpart_offset (mode, GET_MODE (loc));
5336 regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc),
5337 reg_offset, mode);
5338 return gen_rtx_REG_offset (loc, mode, regno, offset);
5339 }
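
/* A worked example, with illustrative register numbers: on a target
   with 32-bit words, the SImode lowpart of a DImode value in hard
   registers 10-11 is register 10 on a little-endian target but
   register 11 on a big-endian one; gen_rtx_REG_offset records the
   corresponding byte offset in the REG attributes.  */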
5340
5341 /* Carry information about uses and stores while walking rtx. */
5342
5343 struct count_use_info
5344 {
5345 /* The insn where the RTX is. */
5346 rtx_insn *insn;
5347
5348 /* The basic block where insn is. */
5349 basic_block bb;
5350
5351 /* The array of n_sets sets in the insn, as determined by cselib. */
5352 struct cselib_set *sets;
5353 int n_sets;
5354
5355 /* True if we're counting stores, false otherwise. */
5356 bool store_p;
5357 };
5358
5359 /* Find a VALUE corresponding to X. */
5360
5361 static inline cselib_val *
5362 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
5363 {
5364 int i;
5365
5366 if (cui->sets)
5367 {
5368 /* This is called after uses are set up and before stores are
5369 processed by cselib, so it's safe to look up srcs, but not
5370 dsts. So we look up expressions that appear in srcs or in
5371 dest expressions, but we search the sets array for dests of
5372 stores. */
5373 if (cui->store_p)
5374 {
5375 /* Some targets represent memset and memcpy patterns
5376 by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
5377 (set (mem:BLK ...) (const_int ...)) or
5378 (set (mem:BLK ...) (mem:BLK ...)). Don't return anything
5379 in that case, otherwise we end up with mode mismatches. */
5380 if (mode == BLKmode && MEM_P (x))
5381 return NULL;
5382 for (i = 0; i < cui->n_sets; i++)
5383 if (cui->sets[i].dest == x)
5384 return cui->sets[i].src_elt;
5385 }
5386 else
5387 return cselib_lookup (x, mode, 0, VOIDmode);
5388 }
5389
5390 return NULL;
5391 }
5392
5393 /* Replace all registers and addresses in an expression with VALUE
5394 expressions that map back to them, unless the expression is a
5395 register. If no mapping is or can be performed, returns NULL. */
5396
5397 static rtx
5398 replace_expr_with_values (rtx loc)
5399 {
5400 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE)
5401 return NULL;
5402 else if (MEM_P (loc))
5403 {
5404 cselib_val *addr = cselib_lookup (XEXP (loc, 0),
5405 get_address_mode (loc), 0,
5406 GET_MODE (loc));
5407 if (addr)
5408 return replace_equiv_address_nv (loc, addr->val_rtx);
5409 else
5410 return NULL;
5411 }
5412 else
5413 return cselib_subst_to_values (loc, VOIDmode);
5414 }
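
/* For instance (V being an illustrative name), a location such as
   (mem:SI (plus:P (reg:P fp) (const_int -4))) becomes
   (mem:SI (value:P V)) when cselib knows VALUE V for the address,
   whereas a bare REG or an ENTRY_VALUE is deliberately left alone
   and NULL is returned for it.  */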
5415
5416 /* Return true if X contains a DEBUG_EXPR. */
5417
5418 static bool
5419 rtx_debug_expr_p (const_rtx x)
5420 {
5421 subrtx_iterator::array_type array;
5422 FOR_EACH_SUBRTX (iter, array, x, ALL)
5423 if (GET_CODE (*iter) == DEBUG_EXPR)
5424 return true;
5425 return false;
5426 }
5427
5428 /* Determine what kind of micro operation to choose for a USE. Return
5429 MO_CLOBBER if no micro operation is to be generated. */
5430
5431 static enum micro_operation_type
5432 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
5433 {
5434 tree expr;
5435
5436 if (cui && cui->sets)
5437 {
5438 if (GET_CODE (loc) == VAR_LOCATION)
5439 {
5440 if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), false))
5441 {
5442 rtx ploc = PAT_VAR_LOCATION_LOC (loc);
5443 if (! VAR_LOC_UNKNOWN_P (ploc))
5444 {
5445 cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
5446 VOIDmode);
5447
5448 /* ??? flag_float_store and volatile mems are never
5449 given values, but we could in theory use them for
5450 locations. */
5451 gcc_assert (val || 1);
5452 }
5453 return MO_VAL_LOC;
5454 }
5455 else
5456 return MO_CLOBBER;
5457 }
5458
5459 if (REG_P (loc) || MEM_P (loc))
5460 {
5461 if (modep)
5462 *modep = GET_MODE (loc);
5463 if (cui->store_p)
5464 {
5465 if (REG_P (loc)
5466 || (find_use_val (loc, GET_MODE (loc), cui)
5467 && cselib_lookup (XEXP (loc, 0),
5468 get_address_mode (loc), 0,
5469 GET_MODE (loc))))
5470 return MO_VAL_SET;
5471 }
5472 else
5473 {
5474 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5475
5476 if (val && !cselib_preserved_value_p (val))
5477 return MO_VAL_USE;
5478 }
5479 }
5480 }
5481
5482 if (REG_P (loc))
5483 {
5484 gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);
5485
5486 if (loc == cfa_base_rtx)
5487 return MO_CLOBBER;
5488 expr = REG_EXPR (loc);
5489
5490 if (!expr)
5491 return MO_USE_NO_VAR;
5492 else if (target_for_debug_bind (var_debug_decl (expr)))
5493 return MO_CLOBBER;
5494 else if (track_loc_p (loc, expr, REG_OFFSET (loc),
5495 false, modep, NULL))
5496 return MO_USE;
5497 else
5498 return MO_USE_NO_VAR;
5499 }
5500 else if (MEM_P (loc))
5501 {
5502 expr = MEM_EXPR (loc);
5503
5504 if (!expr)
5505 return MO_CLOBBER;
5506 else if (target_for_debug_bind (var_debug_decl (expr)))
5507 return MO_CLOBBER;
5508 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc),
5509 false, modep, NULL)
5510 /* Multi-part variables shouldn't refer to one-part
5511 variable names such as VALUEs (never happens) or
5512 DEBUG_EXPRs (only happens in the presence of debug
5513 insns). */
5514 && (!MAY_HAVE_DEBUG_INSNS
5515 || !rtx_debug_expr_p (XEXP (loc, 0))))
5516 return MO_USE;
5517 else
5518 return MO_CLOBBER;
5519 }
5520
5521 return MO_CLOBBER;
5522 }
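
/* In short, when cselib sets are available: a tracked VAR_LOCATION
   yields MO_VAL_LOC; a store to a REG, or to a MEM whose value and
   address are known, yields MO_VAL_SET; a use with a not yet
   preserved value yields MO_VAL_USE.  Otherwise a tracked REG or MEM
   is MO_USE, an untracked REG is MO_USE_NO_VAR, and everything else
   falls back to MO_CLOBBER.  */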
5523
5524 /* Log to OUT information about micro-operation MOPT involving X in
5525 INSN of BB. */
5526
5527 static inline void
5528 log_op_type (rtx x, basic_block bb, rtx_insn *insn,
5529 enum micro_operation_type mopt, FILE *out)
5530 {
5531 fprintf (out, "bb %i op %i insn %i %s ",
5532 bb->index, VTI (bb)->mos.length (),
5533 INSN_UID (insn), micro_operation_type_name[mopt]);
5534 print_inline_rtx (out, x, 2);
5535 fputc ('\n', out);
5536 }
5537
5538 /* Tell whether the CONCAT used to hold a VALUE and its location
5539 needs value resolution, i.e., an attempt at mapping the location
5540 back to other incoming values. */
5541 #define VAL_NEEDS_RESOLUTION(x) \
5542 (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
5543 /* Whether the location in the CONCAT is a tracked expression, that
5544 should also be handled like a MO_USE. */
5545 #define VAL_HOLDS_TRACK_EXPR(x) \
5546 (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
5547 /* Whether the location in the CONCAT should be handled like a MO_COPY
5548 as well. */
5549 #define VAL_EXPR_IS_COPIED(x) \
5550 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
5551 /* Whether the location in the CONCAT should be handled like a
5552 MO_CLOBBER as well. */
5553 #define VAL_EXPR_IS_CLOBBERED(x) \
5554 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
5555
5556 /* All preserved VALUEs. */
5557 static vec<rtx> preserved_values;
5558
5559 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
5560
5561 static void
5562 preserve_value (cselib_val *val)
5563 {
5564 cselib_preserve_value (val);
5565 preserved_values.safe_push (val->val_rtx);
5566 }
5567
5568 /* Helper function for MO_VAL_LOC handling. Return true if any
5569 rtxes not suitable for CONST use, and not replaced by VALUEs,
5570 are discovered. */
5571
5572 static bool
5573 non_suitable_const (const_rtx x)
5574 {
5575 subrtx_iterator::array_type array;
5576 FOR_EACH_SUBRTX (iter, array, x, ALL)
5577 {
5578 const_rtx x = *iter;
5579 switch (GET_CODE (x))
5580 {
5581 case REG:
5582 case DEBUG_EXPR:
5583 case PC:
5584 case SCRATCH:
5585 case CC0:
5586 case ASM_INPUT:
5587 case ASM_OPERANDS:
5588 return true;
5589 case MEM:
5590 if (!MEM_READONLY_P (x))
5591 return true;
5592 break;
5593 default:
5594 break;
5595 }
5596 }
5597 return false;
5598 }
5599
5600 /* Add use (register or memory reference) LOC, if it is to be
5601 tracked, to VTI (bb)->mos. */
5602
5603 static void
5604 add_uses (rtx loc, struct count_use_info *cui)
5605 {
5606 machine_mode mode = VOIDmode;
5607 enum micro_operation_type type = use_type (loc, cui, &mode);
5608
5609 if (type != MO_CLOBBER)
5610 {
5611 basic_block bb = cui->bb;
5612 micro_operation mo;
5613
5614 mo.type = type;
5615 mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
5616 mo.insn = cui->insn;
5617
5618 if (type == MO_VAL_LOC)
5619 {
5620 rtx oloc = loc;
5621 rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
5622 cselib_val *val;
5623
5624 gcc_assert (cui->sets);
5625
5626 if (MEM_P (vloc)
5627 && !REG_P (XEXP (vloc, 0))
5628 && !MEM_P (XEXP (vloc, 0)))
5629 {
5630 rtx mloc = vloc;
5631 machine_mode address_mode = get_address_mode (mloc);
5632 cselib_val *val
5633 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5634 GET_MODE (mloc));
5635
5636 if (val && !cselib_preserved_value_p (val))
5637 preserve_value (val);
5638 }
5639
5640 if (CONSTANT_P (vloc)
5641 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc)))
5642 /* For constants don't look up any value. */;
5643 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc)
5644 && (val = find_use_val (vloc, GET_MODE (oloc), cui)))
5645 {
5646 machine_mode mode2;
5647 enum micro_operation_type type2;
5648 rtx nloc = NULL;
5649 bool resolvable = REG_P (vloc) || MEM_P (vloc);
5650
5651 if (resolvable)
5652 nloc = replace_expr_with_values (vloc);
5653
5654 if (nloc)
5655 {
5656 oloc = shallow_copy_rtx (oloc);
5657 PAT_VAR_LOCATION_LOC (oloc) = nloc;
5658 }
5659
5660 oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);
5661
5662 type2 = use_type (vloc, 0, &mode2);
5663
5664 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5665 || type2 == MO_CLOBBER);
5666
5667 if (type2 == MO_CLOBBER
5668 && !cselib_preserved_value_p (val))
5669 {
5670 VAL_NEEDS_RESOLUTION (oloc) = resolvable;
5671 preserve_value (val);
5672 }
5673 }
5674 else if (!VAR_LOC_UNKNOWN_P (vloc))
5675 {
5676 oloc = shallow_copy_rtx (oloc);
5677 PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
5678 }
5679
5680 mo.u.loc = oloc;
5681 }
5682 else if (type == MO_VAL_USE)
5683 {
5684 machine_mode mode2 = VOIDmode;
5685 enum micro_operation_type type2;
5686 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui);
5687 rtx vloc, oloc = loc, nloc;
5688
5689 gcc_assert (cui->sets);
5690
5691 if (MEM_P (oloc)
5692 && !REG_P (XEXP (oloc, 0))
5693 && !MEM_P (XEXP (oloc, 0)))
5694 {
5695 rtx mloc = oloc;
5696 machine_mode address_mode = get_address_mode (mloc);
5697 cselib_val *val
5698 = cselib_lookup (XEXP (mloc, 0), address_mode, 0,
5699 GET_MODE (mloc));
5700
5701 if (val && !cselib_preserved_value_p (val))
5702 preserve_value (val);
5703 }
5704
5705 type2 = use_type (loc, 0, &mode2);
5706
5707 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
5708 || type2 == MO_CLOBBER);
5709
5710 if (type2 == MO_USE)
5711 vloc = var_lowpart (mode2, loc);
5712 else
5713 vloc = oloc;
5714
5715 /* The loc of a MO_VAL_USE may have two forms:
5716
5717 (concat val src): val is at src, a value-based
5718 representation.
5719
5720 (concat (concat val use) src): same as above, with use as
5721 the MO_USE tracked value, if it differs from src.
5722
5723 */
5724
5725 gcc_checking_assert (REG_P (loc) || MEM_P (loc));
5726 nloc = replace_expr_with_values (loc);
5727 if (!nloc)
5728 nloc = oloc;
5729
5730 if (vloc != nloc)
5731 oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
5732 else
5733 oloc = val->val_rtx;
5734
5735 mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);
5736
5737 if (type2 == MO_USE)
5738 VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
5739 if (!cselib_preserved_value_p (val))
5740 {
5741 VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
5742 preserve_value (val);
5743 }
5744 }
5745 else
5746 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);
5747
5748 if (dump_file && (dump_flags & TDF_DETAILS))
5749 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
5750 VTI (bb)->mos.safe_push (mo);
5751 }
5752 }
5753
5754 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */
5755
5756 static void
5757 add_uses_1 (rtx *x, void *cui)
5758 {
5759 subrtx_var_iterator::array_type array;
5760 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST)
5761 add_uses (*iter, (struct count_use_info *) cui);
5762 }
5763
5764 /* This is the value used during expansion of locations. We want it
5765 to be unbounded, so that variables expanded deep in a recursion
5766 nest are fully evaluated, so that their values are cached
5767 correctly. We avoid recursion cycles through other means, and we
5768 don't unshare RTL, so excess complexity is not a problem. */
5769 #define EXPR_DEPTH (INT_MAX)
5770 /* We use this to keep too-complex expressions from being emitted as
5771 location notes, and then to debug information. Users can trade
5772 compile time for ridiculously complex expressions, although they're
5773 seldom useful, and they may often have to be discarded as not
5774 representable anyway. */
5775 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH))
5776
5777 /* Attempt to reverse the EXPR operation in the debug info and record
5778 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is
5779 no longer live we can express its value as VAL - 6. */
5780
5781 static void
5782 reverse_op (rtx val, const_rtx expr, rtx_insn *insn)
5783 {
5784 rtx src, arg, ret;
5785 cselib_val *v;
5786 struct elt_loc_list *l;
5787 enum rtx_code code;
5788 int count;
5789
5790 if (GET_CODE (expr) != SET)
5791 return;
5792
5793 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr)))
5794 return;
5795
5796 src = SET_SRC (expr);
5797 switch (GET_CODE (src))
5798 {
5799 case PLUS:
5800 case MINUS:
5801 case XOR:
5802 case NOT:
5803 case NEG:
5804 if (!REG_P (XEXP (src, 0)))
5805 return;
5806 break;
5807 case SIGN_EXTEND:
5808 case ZERO_EXTEND:
5809 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0)))
5810 return;
5811 break;
5812 default:
5813 return;
5814 }
5815
5816 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx)
5817 return;
5818
5819 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode);
5820 if (!v || !cselib_preserved_value_p (v))
5821 return;
5822
5823 /* Use canonical V to avoid creating multiple redundant expressions
5824 for different VALUES equivalent to V. */
5825 v = canonical_cselib_val (v);
5826
5827 /* Adding a reverse op isn't useful if V already has an always valid
5828 location. Ignore ENTRY_VALUE: while it is always constant, we
5829 should prefer non-ENTRY_VALUE locations whenever possible. */
5830 for (l = v->locs, count = 0; l; l = l->next, count++)
5831 if (CONSTANT_P (l->loc)
5832 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0)))
5833 return;
5834 /* Avoid creating too large locs lists. */
5835 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE))
5836 return;
5837
5838 switch (GET_CODE (src))
5839 {
5840 case NOT:
5841 case NEG:
5842 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5843 return;
5844 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val);
5845 break;
5846 case SIGN_EXTEND:
5847 case ZERO_EXTEND:
5848 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val);
5849 break;
5850 case XOR:
5851 code = XOR;
5852 goto binary;
5853 case PLUS:
5854 code = MINUS;
5855 goto binary;
5856 case MINUS:
5857 code = PLUS;
5858 goto binary;
5859 binary:
5860 if (GET_MODE (v->val_rtx) != GET_MODE (val))
5861 return;
5862 arg = XEXP (src, 1);
5863 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5864 {
5865 arg = cselib_expand_value_rtx (arg, scratch_regs, 5);
5866 if (arg == NULL_RTX)
5867 return;
5868 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF)
5869 return;
5870 }
5871 ret = simplify_gen_binary (code, GET_MODE (val), val, arg);
5872 if (ret == val)
5873 /* Ensure ret isn't VALUE itself (which can happen e.g. for
5874 (plus (reg1) (reg2)) when reg2 is known to be 0), as that
5875 breaks a lot of routines during var-tracking. */
5876 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx);
5877 break;
5878 default:
5879 gcc_unreachable ();
5880 }
5881
5882 cselib_add_permanent_equiv (v, ret, insn);
5883 }
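
/* Continuing the example above: for (set (reg1) (plus (reg2)
   (const_int 6))), CODE becomes MINUS and the equivalence recorded
   for reg2's VALUE is (minus VAL (const_int 6)), so reg2 can still
   be shown as VAL - 6 once reg2 itself has been overwritten.  */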
5884
5885 /* Add store (register or memory reference) LOC, if it is to be tracked,
5886 to VTI (bb)->mos. EXPR is the RTL expression containing the store.
5887 CUIP->insn is the instruction of which LOC is a part. */
5888
5889 static void
5890 add_stores (rtx loc, const_rtx expr, void *cuip)
5891 {
5892 machine_mode mode = VOIDmode, mode2;
5893 struct count_use_info *cui = (struct count_use_info *)cuip;
5894 basic_block bb = cui->bb;
5895 micro_operation mo;
5896 rtx oloc = loc, nloc, src = NULL;
5897 enum micro_operation_type type = use_type (loc, cui, &mode);
5898 bool track_p = false;
5899 cselib_val *v;
5900 bool resolve, preserve;
5901
5902 if (type == MO_CLOBBER)
5903 return;
5904
5905 mode2 = mode;
5906
5907 if (REG_P (loc))
5908 {
5909 gcc_assert (loc != cfa_base_rtx);
5910 if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
5911 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE)
5912 || GET_CODE (expr) == CLOBBER)
5913 {
5914 mo.type = MO_CLOBBER;
5915 mo.u.loc = loc;
5916 if (GET_CODE (expr) == SET
5917 && SET_DEST (expr) == loc
5918 && !unsuitable_loc (SET_SRC (expr))
5919 && find_use_val (loc, mode, cui))
5920 {
5921 gcc_checking_assert (type == MO_VAL_SET);
5922 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
5923 }
5924 }
5925 else
5926 {
5927 if (GET_CODE (expr) == SET
5928 && SET_DEST (expr) == loc
5929 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5930 src = var_lowpart (mode2, SET_SRC (expr));
5931 loc = var_lowpart (mode2, loc);
5932
5933 if (src == NULL)
5934 {
5935 mo.type = MO_SET;
5936 mo.u.loc = loc;
5937 }
5938 else
5939 {
5940 rtx xexpr = gen_rtx_SET (loc, src);
5941 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc)))
5942 {
5943 /* If this is an instruction copying (part of) a parameter
5944 passed by invisible reference to its register location,
5945 pretend it's a SET so that the initial memory location
5946 is discarded, as the parameter register can be reused
5947 for other purposes and we do not track locations based
5948 on generic registers. */
5949 if (MEM_P (src)
5950 && REG_EXPR (loc)
5951 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
5952 && DECL_MODE (REG_EXPR (loc)) != BLKmode
5953 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
5954 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
5955 != arg_pointer_rtx)
5956 mo.type = MO_SET;
5957 else
5958 mo.type = MO_COPY;
5959 }
5960 else
5961 mo.type = MO_SET;
5962 mo.u.loc = xexpr;
5963 }
5964 }
5965 mo.insn = cui->insn;
5966 }
5967 else if (MEM_P (loc)
5968 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE)
5969 || cui->sets))
5970 {
5971 if (MEM_P (loc) && type == MO_VAL_SET
5972 && !REG_P (XEXP (loc, 0))
5973 && !MEM_P (XEXP (loc, 0)))
5974 {
5975 rtx mloc = loc;
5976 machine_mode address_mode = get_address_mode (mloc);
5977 cselib_val *val = cselib_lookup (XEXP (mloc, 0),
5978 address_mode, 0,
5979 GET_MODE (mloc));
5980
5981 if (val && !cselib_preserved_value_p (val))
5982 preserve_value (val);
5983 }
5984
5985 if (GET_CODE (expr) == CLOBBER || !track_p)
5986 {
5987 mo.type = MO_CLOBBER;
5988 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc;
5989 }
5990 else
5991 {
5992 if (GET_CODE (expr) == SET
5993 && SET_DEST (expr) == loc
5994 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
5995 src = var_lowpart (mode2, SET_SRC (expr));
5996 loc = var_lowpart (mode2, loc);
5997
5998 if (src == NULL)
5999 {
6000 mo.type = MO_SET;
6001 mo.u.loc = loc;
6002 }
6003 else
6004 {
6005 rtx xexpr = gen_rtx_SET (loc, src);
6006 if (same_variable_part_p (SET_SRC (xexpr),
6007 MEM_EXPR (loc),
6008 INT_MEM_OFFSET (loc)))
6009 mo.type = MO_COPY;
6010 else
6011 mo.type = MO_SET;
6012 mo.u.loc = xexpr;
6013 }
6014 }
6015 mo.insn = cui->insn;
6016 }
6017 else
6018 return;
6019
6020 if (type != MO_VAL_SET)
6021 goto log_and_return;
6022
6023 v = find_use_val (oloc, mode, cui);
6024
6025 if (!v)
6026 goto log_and_return;
6027
6028 resolve = preserve = !cselib_preserved_value_p (v);
6029
6030 /* We cannot track values for multiple-part variables, so we track only
6031 locations for tracked parameters passed either by invisible reference
6032 or directly in multiple locations. */
6033 if (track_p
6034 && REG_P (loc)
6035 && REG_EXPR (loc)
6036 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
6037 && DECL_MODE (REG_EXPR (loc)) != BLKmode
6038 && TREE_CODE (TREE_TYPE (REG_EXPR (loc))) != UNION_TYPE
6039 && ((MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
6040 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) != arg_pointer_rtx)
6041 || (GET_CODE (DECL_INCOMING_RTL (REG_EXPR (loc))) == PARALLEL
6042 && XVECLEN (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) > 1)))
6043 {
6044 /* Although we don't use the value here, it could be used later by the
6045 mere virtue of its existence as the operand of the reverse operation
6046 that gave rise to it (typically extension/truncation). Make sure it
6047 is preserved as required by vt_expand_var_loc_chain. */
6048 if (preserve)
6049 preserve_value (v);
6050 goto log_and_return;
6051 }
6052
6053 if (loc == stack_pointer_rtx
6054 && hard_frame_pointer_adjustment != -1
6055 && preserve)
6056 cselib_set_value_sp_based (v);
6057
6058 nloc = replace_expr_with_values (oloc);
6059 if (nloc)
6060 oloc = nloc;
6061
6062 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
6063 {
6064 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);
6065
6066 if (oval == v)
6067 return;
6068 gcc_assert (REG_P (oloc) || MEM_P (oloc));
6069
6070 if (oval && !cselib_preserved_value_p (oval))
6071 {
6072 micro_operation moa;
6073
6074 preserve_value (oval);
6075
6076 moa.type = MO_VAL_USE;
6077 moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
6078 VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
6079 moa.insn = cui->insn;
6080
6081 if (dump_file && (dump_flags & TDF_DETAILS))
6082 log_op_type (moa.u.loc, cui->bb, cui->insn,
6083 moa.type, dump_file);
6084 VTI (bb)->mos.safe_push (moa);
6085 }
6086
6087 resolve = false;
6088 }
6089 else if (resolve && GET_CODE (mo.u.loc) == SET)
6090 {
6091 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
6092 nloc = replace_expr_with_values (SET_SRC (expr));
6093 else
6094 nloc = NULL_RTX;
6095
6096       /* Avoid the mode mismatch between mo.u.loc and expr.  */
6097 if (!nloc && mode != mode2)
6098 {
6099 nloc = SET_SRC (expr);
6100 gcc_assert (oloc == SET_DEST (expr));
6101 }
6102
6103 if (nloc && nloc != SET_SRC (mo.u.loc))
6104 oloc = gen_rtx_SET (oloc, nloc);
6105 else
6106 {
6107 if (oloc == SET_DEST (mo.u.loc))
6108 /* No point in duplicating. */
6109 oloc = mo.u.loc;
6110 if (!REG_P (SET_SRC (mo.u.loc)))
6111 resolve = false;
6112 }
6113 }
6114 else if (!resolve)
6115 {
6116 if (GET_CODE (mo.u.loc) == SET
6117 && oloc == SET_DEST (mo.u.loc))
6118 /* No point in duplicating. */
6119 oloc = mo.u.loc;
6120 }
6121 else
6122 resolve = false;
6123
6124 loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);
6125
6126 if (mo.u.loc != oloc)
6127 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);
6128
6129 /* The loc of a MO_VAL_SET may have various forms:
6130
6131 (concat val dst): dst now holds val
6132
6133 (concat val (set dst src)): dst now holds val, copied from src
6134
6135 (concat (concat val dstv) dst): dst now holds val; dstv is dst
6136 after replacing mems and non-top-level regs with values.
6137
6138 (concat (concat val dstv) (set dst src)): dst now holds val,
6139 copied from src. dstv is a value-based representation of dst, if
6140 it differs from dst. If resolution is needed, src is a REG, and
6141 its mode is the same as that of val.
6142
6143 (concat (concat val (set dstv srcv)) (set dst src)): src
6144 copied to dst, holding val. dstv and srcv are value-based
6145 representations of dst and src, respectively.
6146
6147 */
6148
6149 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
6150 reverse_op (v->val_rtx, expr, cui->insn);
6151
6152 mo.u.loc = loc;
6153
6154 if (track_p)
6155 VAL_HOLDS_TRACK_EXPR (loc) = 1;
6156 if (preserve)
6157 {
6158 VAL_NEEDS_RESOLUTION (loc) = resolve;
6159 preserve_value (v);
6160 }
6161 if (mo.type == MO_CLOBBER)
6162 VAL_EXPR_IS_CLOBBERED (loc) = 1;
6163 if (mo.type == MO_COPY)
6164 VAL_EXPR_IS_COPIED (loc) = 1;
6165
6166 mo.type = MO_VAL_SET;
6167
6168 log_and_return:
6169 if (dump_file && (dump_flags & TDF_DETAILS))
6170 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
6171 VTI (bb)->mos.safe_push (mo);
6172 }
6173
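/* A hypothetical instance of the forms listed above (for exposition
   only): for an insn (set (reg:SI 5) (reg:SI 6)), where the stored
   contents get cselib VALUE V, the MO_VAL_SET loc would take the
   second form:

     (concat V (set (reg:SI 5) (reg:SI 6)))

   with VAL_NEEDS_RESOLUTION set when V still has to be bound to the
   source register's value.  */
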
6174 /* Arguments to the call. */
6175 static rtx call_arguments;
6176
6177 /* Compute call_arguments. */
6178
6179 static void
6180 prepare_call_arguments (basic_block bb, rtx_insn *insn)
6181 {
6182 rtx link, x, call;
6183 rtx prev, cur, next;
6184 rtx this_arg = NULL_RTX;
6185 tree type = NULL_TREE, t, fndecl = NULL_TREE;
6186 tree obj_type_ref = NULL_TREE;
6187 CUMULATIVE_ARGS args_so_far_v;
6188 cumulative_args_t args_so_far;
6189
6190 memset (&args_so_far_v, 0, sizeof (args_so_far_v));
6191 args_so_far = pack_cumulative_args (&args_so_far_v);
6192 call = get_call_rtx_from (insn);
6193 if (call)
6194 {
6195 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
6196 {
6197 rtx symbol = XEXP (XEXP (call, 0), 0);
6198 if (SYMBOL_REF_DECL (symbol))
6199 fndecl = SYMBOL_REF_DECL (symbol);
6200 }
6201 if (fndecl == NULL_TREE)
6202 fndecl = MEM_EXPR (XEXP (call, 0));
6203 if (fndecl
6204 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
6205 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
6206 fndecl = NULL_TREE;
6207 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
6208 type = TREE_TYPE (fndecl);
6209 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
6210 {
6211 if (TREE_CODE (fndecl) == INDIRECT_REF
6212 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
6213 obj_type_ref = TREE_OPERAND (fndecl, 0);
6214 fndecl = NULL_TREE;
6215 }
6216 if (type)
6217 {
6218 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
6219 t = TREE_CHAIN (t))
6220 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
6221 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
6222 break;
6223 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
6224 type = NULL;
6225 else
6226 {
6227 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
6228 link = CALL_INSN_FUNCTION_USAGE (insn);
6229 #ifndef PCC_STATIC_STRUCT_RETURN
6230 if (aggregate_value_p (TREE_TYPE (type), type)
6231 && targetm.calls.struct_value_rtx (type, 0) == 0)
6232 {
6233 tree struct_addr = build_pointer_type (TREE_TYPE (type));
6234 machine_mode mode = TYPE_MODE (struct_addr);
6235 rtx reg;
6236 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6237 nargs + 1);
6238 reg = targetm.calls.function_arg (args_so_far, mode,
6239 struct_addr, true);
6240 targetm.calls.function_arg_advance (args_so_far, mode,
6241 struct_addr, true);
6242 if (reg == NULL_RTX)
6243 {
6244 for (; link; link = XEXP (link, 1))
6245 if (GET_CODE (XEXP (link, 0)) == USE
6246 && MEM_P (XEXP (XEXP (link, 0), 0)))
6247 {
6248 link = XEXP (link, 1);
6249 break;
6250 }
6251 }
6252 }
6253 else
6254 #endif
6255 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
6256 nargs);
6257 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
6258 {
6259 machine_mode mode;
6260 t = TYPE_ARG_TYPES (type);
6261 mode = TYPE_MODE (TREE_VALUE (t));
6262 this_arg = targetm.calls.function_arg (args_so_far, mode,
6263 TREE_VALUE (t), true);
6264 if (this_arg && !REG_P (this_arg))
6265 this_arg = NULL_RTX;
6266 else if (this_arg == NULL_RTX)
6267 {
6268 for (; link; link = XEXP (link, 1))
6269 if (GET_CODE (XEXP (link, 0)) == USE
6270 && MEM_P (XEXP (XEXP (link, 0), 0)))
6271 {
6272 this_arg = XEXP (XEXP (link, 0), 0);
6273 break;
6274 }
6275 }
6276 }
6277 }
6278 }
6279 }
6280 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;
6281
6282 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
6283 if (GET_CODE (XEXP (link, 0)) == USE)
6284 {
6285 rtx item = NULL_RTX;
6286 x = XEXP (XEXP (link, 0), 0);
6287 if (GET_MODE (link) == VOIDmode
6288 || GET_MODE (link) == BLKmode
6289 || (GET_MODE (link) != GET_MODE (x)
6290 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
6291 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
6292 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
6293 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
6294 /* Can't do anything for these, if the original type mode
6295 isn't known or can't be converted. */;
6296 else if (REG_P (x))
6297 {
6298 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6299 if (val && cselib_preserved_value_p (val))
6300 item = val->val_rtx;
6301 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
6302 || GET_MODE_CLASS (GET_MODE (x)) == MODE_PARTIAL_INT)
6303 {
6304 machine_mode mode = GET_MODE (x);
6305
6306 while ((mode = GET_MODE_WIDER_MODE (mode)) != VOIDmode
6307 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD)
6308 {
6309 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0);
6310
6311 if (reg == NULL_RTX || !REG_P (reg))
6312 continue;
6313 val = cselib_lookup (reg, mode, 0, VOIDmode);
6314 if (val && cselib_preserved_value_p (val))
6315 {
6316 item = val->val_rtx;
6317 break;
6318 }
6319 }
6320 }
6321 }
6322 else if (MEM_P (x))
6323 {
6324 rtx mem = x;
6325 cselib_val *val;
6326
6327 if (!frame_pointer_needed)
6328 {
6329 struct adjust_mem_data amd;
6330 amd.mem_mode = VOIDmode;
6331 amd.stack_adjust = -VTI (bb)->out.stack_adjust;
6332 amd.side_effects = NULL;
6333 amd.store = true;
6334 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems,
6335 &amd);
6336 gcc_assert (amd.side_effects == NULL_RTX);
6337 }
6338 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
6339 if (val && cselib_preserved_value_p (val))
6340 item = val->val_rtx;
6341 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
6342 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
6343 {
6344             /* For a non-integer stack argument, also check whether it
6345                was initialized with integers.  */
6346 machine_mode imode = int_mode_for_mode (GET_MODE (mem));
6347 if (imode != GET_MODE (mem) && imode != BLKmode)
6348 {
6349 val = cselib_lookup (adjust_address_nv (mem, imode, 0),
6350 imode, 0, VOIDmode);
6351 if (val && cselib_preserved_value_p (val))
6352 item = lowpart_subreg (GET_MODE (x), val->val_rtx,
6353 imode);
6354 }
6355 }
6356 }
6357 if (item)
6358 {
6359 rtx x2 = x;
6360 if (GET_MODE (item) != GET_MODE (link))
6361 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item));
6362 if (GET_MODE (x2) != GET_MODE (link))
6363 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2));
6364 item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
6365 call_arguments
6366 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
6367 }
6368 if (t && t != void_list_node)
6369 {
6370 tree argtype = TREE_VALUE (t);
6371 machine_mode mode = TYPE_MODE (argtype);
6372 rtx reg;
6373 if (pass_by_reference (&args_so_far_v, mode, argtype, true))
6374 {
6375 argtype = build_pointer_type (argtype);
6376 mode = TYPE_MODE (argtype);
6377 }
6378 reg = targetm.calls.function_arg (args_so_far, mode,
6379 argtype, true);
6380 if (TREE_CODE (argtype) == REFERENCE_TYPE
6381 && INTEGRAL_TYPE_P (TREE_TYPE (argtype))
6382 && reg
6383 && REG_P (reg)
6384 && GET_MODE (reg) == mode
6385 && (GET_MODE_CLASS (mode) == MODE_INT
6386 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
6387 && REG_P (x)
6388 && REGNO (x) == REGNO (reg)
6389 && GET_MODE (x) == mode
6390 && item)
6391 {
6392 machine_mode indmode
6393 = TYPE_MODE (TREE_TYPE (argtype));
6394 rtx mem = gen_rtx_MEM (indmode, x);
6395 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
6396 if (val && cselib_preserved_value_p (val))
6397 {
6398 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
6399 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6400 call_arguments);
6401 }
6402 else
6403 {
6404 struct elt_loc_list *l;
6405 tree initial;
6406
6407                   /* Try harder: when passing the address of a constant-pool
6408                      integer, it can easily be read back.  */
6409 item = XEXP (item, 1);
6410 if (GET_CODE (item) == SUBREG)
6411 item = SUBREG_REG (item);
6412 gcc_assert (GET_CODE (item) == VALUE);
6413 val = CSELIB_VAL_PTR (item);
6414 for (l = val->locs; l; l = l->next)
6415 if (GET_CODE (l->loc) == SYMBOL_REF
6416 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
6417 && SYMBOL_REF_DECL (l->loc)
6418 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
6419 {
6420 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
6421 if (tree_fits_shwi_p (initial))
6422 {
6423 item = GEN_INT (tree_to_shwi (initial));
6424 item = gen_rtx_CONCAT (indmode, mem, item);
6425 call_arguments
6426 = gen_rtx_EXPR_LIST (VOIDmode, item,
6427 call_arguments);
6428 }
6429 break;
6430 }
6431 }
6432 }
6433 targetm.calls.function_arg_advance (args_so_far, mode,
6434 argtype, true);
6435 t = TREE_CHAIN (t);
6436 }
6437 }
6438
6439 /* Add debug arguments. */
6440 if (fndecl
6441 && TREE_CODE (fndecl) == FUNCTION_DECL
6442 && DECL_HAS_DEBUG_ARGS_P (fndecl))
6443 {
6444 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
6445 if (debug_args)
6446 {
6447 unsigned int ix;
6448 tree param;
6449 for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
6450 {
6451 rtx item;
6452 tree dtemp = (**debug_args)[ix + 1];
6453 machine_mode mode = DECL_MODE (dtemp);
6454 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
6455 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
6456 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
6457 call_arguments);
6458 }
6459 }
6460 }
6461
6462   /* Reverse the call_arguments chain.  */
6463 prev = NULL_RTX;
6464 for (cur = call_arguments; cur; cur = next)
6465 {
6466 next = XEXP (cur, 1);
6467 XEXP (cur, 1) = prev;
6468 prev = cur;
6469 }
6470 call_arguments = prev;
6471
6472 x = get_call_rtx_from (insn);
6473 if (x)
6474 {
6475 x = XEXP (XEXP (x, 0), 0);
6476 if (GET_CODE (x) == SYMBOL_REF)
6477 /* Don't record anything. */;
6478 else if (CONSTANT_P (x))
6479 {
6480 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
6481 pc_rtx, x);
6482 call_arguments
6483 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6484 }
6485 else
6486 {
6487 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
6488 if (val && cselib_preserved_value_p (val))
6489 {
6490 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
6491 call_arguments
6492 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6493 }
6494 }
6495 }
6496 if (this_arg)
6497 {
6498 machine_mode mode
6499 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
6500 rtx clobbered = gen_rtx_MEM (mode, this_arg);
6501 HOST_WIDE_INT token
6502 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
6503 if (token)
6504 clobbered = plus_constant (mode, clobbered,
6505 token * GET_MODE_SIZE (mode));
6506 clobbered = gen_rtx_MEM (mode, clobbered);
6507 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
6508 call_arguments
6509 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
6510 }
6511 }
6512
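/* Shape of the result, as a hypothetical sketch: each recorded
   argument becomes one EXPR_LIST element pairing the argument's
   location with its preserved cselib VALUE, e.g. for an argument in
   (reg:SI 5) with VALUE V:

     (expr_list:VOID (concat:SI (reg:SI 5) V) <rest of arguments>)

   An indirect callee address is recorded as (concat pc <address>),
   and a vtable slot clobbered through the this argument as
   (concat (clobber pc) <slot>).  The list is built in reverse and
   flipped at the end, so it ends up in argument order.  */
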
6513 /* Callback for cselib_record_sets_hook: record as micro operations
6514    the uses and stores in an insn, after cselib_record_sets has
6515    analyzed the sets in the insn but before it modifies the stored
6516    values in its internal tables.  It may instead be called directly,
6517    bypassing cselib_record_sets (perhaps because we're not doing cselib
6518    in the first place), in which case SETS and N_SETS will be 0.  */
6519
6520 static void
6521 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets)
6522 {
6523 basic_block bb = BLOCK_FOR_INSN (insn);
6524 int n1, n2;
6525 struct count_use_info cui;
6526 micro_operation *mos;
6527
6528 cselib_hook_called = true;
6529
6530 cui.insn = insn;
6531 cui.bb = bb;
6532 cui.sets = sets;
6533 cui.n_sets = n_sets;
6534
6535 n1 = VTI (bb)->mos.length ();
6536 cui.store_p = false;
6537 note_uses (&PATTERN (insn), add_uses_1, &cui);
6538 n2 = VTI (bb)->mos.length () - 1;
6539 mos = VTI (bb)->mos.address ();
6540
6541 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
6542 MO_VAL_LOC last. */
6543 while (n1 < n2)
6544 {
6545 while (n1 < n2 && mos[n1].type == MO_USE)
6546 n1++;
6547 while (n1 < n2 && mos[n2].type != MO_USE)
6548 n2--;
6549 if (n1 < n2)
6550 std::swap (mos[n1], mos[n2]);
6551 }
6552
6553 n2 = VTI (bb)->mos.length () - 1;
6554 while (n1 < n2)
6555 {
6556 while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
6557 n1++;
6558 while (n1 < n2 && mos[n2].type == MO_VAL_LOC)
6559 n2--;
6560 if (n1 < n2)
6561 std::swap (mos[n1], mos[n2]);
6562 }
6563
6564 if (CALL_P (insn))
6565 {
6566 micro_operation mo;
6567
6568 mo.type = MO_CALL;
6569 mo.insn = insn;
6570 mo.u.loc = call_arguments;
6571 call_arguments = NULL_RTX;
6572
6573 if (dump_file && (dump_flags & TDF_DETAILS))
6574 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
6575 VTI (bb)->mos.safe_push (mo);
6576 }
6577
6578 n1 = VTI (bb)->mos.length ();
6579 /* This will record NEXT_INSN (insn), such that we can
6580 insert notes before it without worrying about any
6581 notes that MO_USEs might emit after the insn. */
6582 cui.store_p = true;
6583 note_stores (PATTERN (insn), add_stores, &cui);
6584 n2 = VTI (bb)->mos.length () - 1;
6585 mos = VTI (bb)->mos.address ();
6586
6587 /* Order the MO_VAL_USEs first (note_stores does nothing
6588 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
6589 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */
6590 while (n1 < n2)
6591 {
6592 while (n1 < n2 && mos[n1].type == MO_VAL_USE)
6593 n1++;
6594 while (n1 < n2 && mos[n2].type != MO_VAL_USE)
6595 n2--;
6596 if (n1 < n2)
6597 std::swap (mos[n1], mos[n2]);
6598 }
6599
6600 n2 = VTI (bb)->mos.length () - 1;
6601 while (n1 < n2)
6602 {
6603 while (n1 < n2 && mos[n1].type == MO_CLOBBER)
6604 n1++;
6605 while (n1 < n2 && mos[n2].type != MO_CLOBBER)
6606 n2--;
6607 if (n1 < n2)
6608 std::swap (mos[n1], mos[n2]);
6609 }
6610 }
6611
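/* Illustrative summary (not from the original source): after the
   ordering passes above, the micro operations for a single insn sit
   in VTI (bb)->mos roughly as

     MO_USE... MO_USE_NO_VAR/MO_VAL_USE... MO_VAL_LOC...
     [MO_CALL]
     MO_VAL_USE... MO_CLOBBER... MO_SET/MO_COPY/MO_VAL_SET...

   i.e. uses first, then the call marker if any, then the stores.  */
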
6612 static enum var_init_status
6613 find_src_status (dataflow_set *in, rtx src)
6614 {
6615 tree decl = NULL_TREE;
6616 enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED;
6617
6618 if (! flag_var_tracking_uninit)
6619 status = VAR_INIT_STATUS_INITIALIZED;
6620
6621 if (src && REG_P (src))
6622 decl = var_debug_decl (REG_EXPR (src));
6623 else if (src && MEM_P (src))
6624 decl = var_debug_decl (MEM_EXPR (src));
6625
6626 if (src && decl)
6627 status = get_init_value (in, src, dv_from_decl (decl));
6628
6629 return status;
6630 }
6631
6632 /* SRC is the source of an assignment.  Use SET to try to find what
6633    was ultimately assigned to SRC.  Return that value if known,
6634    otherwise return NULL_RTX.  */
6635
6636 static rtx
6637 find_src_set_src (dataflow_set *set, rtx src)
6638 {
6639 tree decl = NULL_TREE; /* The variable being copied around. */
6640 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */
6641 variable var;
6642 location_chain nextp;
6643 int i;
6644 bool found;
6645
6646 if (src && REG_P (src))
6647 decl = var_debug_decl (REG_EXPR (src));
6648 else if (src && MEM_P (src))
6649 decl = var_debug_decl (MEM_EXPR (src));
6650
6651 if (src && decl)
6652 {
6653 decl_or_value dv = dv_from_decl (decl);
6654
6655 var = shared_hash_find (set->vars, dv);
6656 if (var)
6657 {
6658 found = false;
6659 for (i = 0; i < var->n_var_parts && !found; i++)
6660 for (nextp = var->var_part[i].loc_chain; nextp && !found;
6661 nextp = nextp->next)
6662 if (rtx_equal_p (nextp->loc, src))
6663 {
6664 set_src = nextp->set_src;
6665 found = true;
6666 }
6667
6668 }
6669 }
6670
6671 return set_src;
6672 }
6673
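/* Hypothetical example: if SET records that variable a currently
   lives in (reg 5), and that location's set_src notes it was copied
   from (mem b), then find_src_set_src (set, (reg 5)) returns (mem b);
   with no matching location it returns NULL_RTX.  */
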
6674 /* Compute the changes of variable locations in the basic block BB. */
6675
6676 static bool
6677 compute_bb_dataflow (basic_block bb)
6678 {
6679 unsigned int i;
6680 micro_operation *mo;
6681 bool changed;
6682 dataflow_set old_out;
6683 dataflow_set *in = &VTI (bb)->in;
6684 dataflow_set *out = &VTI (bb)->out;
6685
6686 dataflow_set_init (&old_out);
6687 dataflow_set_copy (&old_out, out);
6688 dataflow_set_copy (out, in);
6689
6690 if (MAY_HAVE_DEBUG_INSNS)
6691 local_get_addr_cache = new hash_map<rtx, rtx>;
6692
6693 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
6694 {
6695 rtx_insn *insn = mo->insn;
6696
6697 switch (mo->type)
6698 {
6699 case MO_CALL:
6700 dataflow_set_clear_at_call (out);
6701 break;
6702
6703 case MO_USE:
6704 {
6705 rtx loc = mo->u.loc;
6706
6707 if (REG_P (loc))
6708 var_reg_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6709 else if (MEM_P (loc))
6710 var_mem_set (out, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
6711 }
6712 break;
6713
6714 case MO_VAL_LOC:
6715 {
6716 rtx loc = mo->u.loc;
6717 rtx val, vloc;
6718 tree var;
6719
6720 if (GET_CODE (loc) == CONCAT)
6721 {
6722 val = XEXP (loc, 0);
6723 vloc = XEXP (loc, 1);
6724 }
6725 else
6726 {
6727 val = NULL_RTX;
6728 vloc = loc;
6729 }
6730
6731 var = PAT_VAR_LOCATION_DECL (vloc);
6732
6733 clobber_variable_part (out, NULL_RTX,
6734 dv_from_decl (var), 0, NULL_RTX);
6735 if (val)
6736 {
6737 if (VAL_NEEDS_RESOLUTION (loc))
6738 val_resolve (out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
6739 set_variable_part (out, val, dv_from_decl (var), 0,
6740 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6741 INSERT);
6742 }
6743 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
6744 set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
6745 dv_from_decl (var), 0,
6746 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
6747 INSERT);
6748 }
6749 break;
6750
6751 case MO_VAL_USE:
6752 {
6753 rtx loc = mo->u.loc;
6754 rtx val, vloc, uloc;
6755
6756 vloc = uloc = XEXP (loc, 1);
6757 val = XEXP (loc, 0);
6758
6759 if (GET_CODE (val) == CONCAT)
6760 {
6761 uloc = XEXP (val, 1);
6762 val = XEXP (val, 0);
6763 }
6764
6765 if (VAL_NEEDS_RESOLUTION (loc))
6766 val_resolve (out, val, vloc, insn);
6767 else
6768 val_store (out, val, uloc, insn, false);
6769
6770 if (VAL_HOLDS_TRACK_EXPR (loc))
6771 {
6772 if (GET_CODE (uloc) == REG)
6773 var_reg_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6774 NULL);
6775 else if (GET_CODE (uloc) == MEM)
6776 var_mem_set (out, uloc, VAR_INIT_STATUS_UNINITIALIZED,
6777 NULL);
6778 }
6779 }
6780 break;
6781
6782 case MO_VAL_SET:
6783 {
6784 rtx loc = mo->u.loc;
6785 rtx val, vloc, uloc;
6786 rtx dstv, srcv;
6787
6788 vloc = loc;
6789 uloc = XEXP (vloc, 1);
6790 val = XEXP (vloc, 0);
6791 vloc = uloc;
6792
6793 if (GET_CODE (uloc) == SET)
6794 {
6795 dstv = SET_DEST (uloc);
6796 srcv = SET_SRC (uloc);
6797 }
6798 else
6799 {
6800 dstv = uloc;
6801 srcv = NULL;
6802 }
6803
6804 if (GET_CODE (val) == CONCAT)
6805 {
6806 dstv = vloc = XEXP (val, 1);
6807 val = XEXP (val, 0);
6808 }
6809
6810 if (GET_CODE (vloc) == SET)
6811 {
6812 srcv = SET_SRC (vloc);
6813
6814 gcc_assert (val != srcv);
6815 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
6816
6817 dstv = vloc = SET_DEST (vloc);
6818
6819 if (VAL_NEEDS_RESOLUTION (loc))
6820 val_resolve (out, val, srcv, insn);
6821 }
6822 else if (VAL_NEEDS_RESOLUTION (loc))
6823 {
6824 gcc_assert (GET_CODE (uloc) == SET
6825 && GET_CODE (SET_SRC (uloc)) == REG);
6826 val_resolve (out, val, SET_SRC (uloc), insn);
6827 }
6828
6829 if (VAL_HOLDS_TRACK_EXPR (loc))
6830 {
6831 if (VAL_EXPR_IS_CLOBBERED (loc))
6832 {
6833 if (REG_P (uloc))
6834 var_reg_delete (out, uloc, true);
6835 else if (MEM_P (uloc))
6836 {
6837 gcc_assert (MEM_P (dstv));
6838 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
6839 var_mem_delete (out, dstv, true);
6840 }
6841 }
6842 else
6843 {
6844 bool copied_p = VAL_EXPR_IS_COPIED (loc);
6845 rtx src = NULL, dst = uloc;
6846 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
6847
6848 if (GET_CODE (uloc) == SET)
6849 {
6850 src = SET_SRC (uloc);
6851 dst = SET_DEST (uloc);
6852 }
6853
6854 if (copied_p)
6855 {
6856 if (flag_var_tracking_uninit)
6857 {
6858 status = find_src_status (in, src);
6859
6860 if (status == VAR_INIT_STATUS_UNKNOWN)
6861 status = find_src_status (out, src);
6862 }
6863
6864 src = find_src_set_src (in, src);
6865 }
6866
6867 if (REG_P (dst))
6868 var_reg_delete_and_set (out, dst, !copied_p,
6869 status, srcv);
6870 else if (MEM_P (dst))
6871 {
6872 gcc_assert (MEM_P (dstv));
6873 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
6874 var_mem_delete_and_set (out, dstv, !copied_p,
6875 status, srcv);
6876 }
6877 }
6878 }
6879 else if (REG_P (uloc))
6880 var_regno_delete (out, REGNO (uloc));
6881 else if (MEM_P (uloc))
6882 {
6883 gcc_checking_assert (GET_CODE (vloc) == MEM);
6884 gcc_checking_assert (dstv == vloc);
6885 if (dstv != vloc)
6886 clobber_overlapping_mems (out, vloc);
6887 }
6888
6889 val_store (out, val, dstv, insn, true);
6890 }
6891 break;
6892
6893 case MO_SET:
6894 {
6895 rtx loc = mo->u.loc;
6896 rtx set_src = NULL;
6897
6898 if (GET_CODE (loc) == SET)
6899 {
6900 set_src = SET_SRC (loc);
6901 loc = SET_DEST (loc);
6902 }
6903
6904 if (REG_P (loc))
6905 var_reg_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6906 set_src);
6907 else if (MEM_P (loc))
6908 var_mem_delete_and_set (out, loc, true, VAR_INIT_STATUS_INITIALIZED,
6909 set_src);
6910 }
6911 break;
6912
6913 case MO_COPY:
6914 {
6915 rtx loc = mo->u.loc;
6916 enum var_init_status src_status;
6917 rtx set_src = NULL;
6918
6919 if (GET_CODE (loc) == SET)
6920 {
6921 set_src = SET_SRC (loc);
6922 loc = SET_DEST (loc);
6923 }
6924
6925 if (! flag_var_tracking_uninit)
6926 src_status = VAR_INIT_STATUS_INITIALIZED;
6927 else
6928 {
6929 src_status = find_src_status (in, set_src);
6930
6931 if (src_status == VAR_INIT_STATUS_UNKNOWN)
6932 src_status = find_src_status (out, set_src);
6933 }
6934
6935 set_src = find_src_set_src (in, set_src);
6936
6937 if (REG_P (loc))
6938 var_reg_delete_and_set (out, loc, false, src_status, set_src);
6939 else if (MEM_P (loc))
6940 var_mem_delete_and_set (out, loc, false, src_status, set_src);
6941 }
6942 break;
6943
6944 case MO_USE_NO_VAR:
6945 {
6946 rtx loc = mo->u.loc;
6947
6948 if (REG_P (loc))
6949 var_reg_delete (out, loc, false);
6950 else if (MEM_P (loc))
6951 var_mem_delete (out, loc, false);
6952 }
6953 break;
6954
6955 case MO_CLOBBER:
6956 {
6957 rtx loc = mo->u.loc;
6958
6959 if (REG_P (loc))
6960 var_reg_delete (out, loc, true);
6961 else if (MEM_P (loc))
6962 var_mem_delete (out, loc, true);
6963 }
6964 break;
6965
6966 case MO_ADJUST:
6967 out->stack_adjust += mo->u.adjust;
6968 break;
6969 }
6970 }
6971
6972 if (MAY_HAVE_DEBUG_INSNS)
6973 {
6974 delete local_get_addr_cache;
6975 local_get_addr_cache = NULL;
6976
6977 dataflow_set_equiv_regs (out);
6978 shared_hash_htab (out->vars)
6979 ->traverse <dataflow_set *, canonicalize_values_mark> (out);
6980 shared_hash_htab (out->vars)
6981 ->traverse <dataflow_set *, canonicalize_values_star> (out);
6982 #if ENABLE_CHECKING
6983 shared_hash_htab (out->vars)
6984 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out);
6985 #endif
6986 }
6987 changed = dataflow_set_different (&old_out, out);
6988 dataflow_set_destroy (&old_out);
6989 return changed;
6990 }
6991
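/* Worked sketch of one step (hypothetical): with OUT initialized from
   IN, an MO_SET whose loc is (set (reg 5) (mem X)) makes reg 5 the
   current location of the variable part stored there, a later
   MO_CLOBBER of (reg 5) drops it again, and an MO_ADJUST adds its
   delta to OUT->stack_adjust.  The block is considered changed iff
   the recomputed OUT differs from the previous OUT.  */
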
6992 /* Find the locations of variables in the whole function. */
6993
6994 static bool
6995 vt_find_locations (void)
6996 {
6997 bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
6998 bb_heap_t *pending = new bb_heap_t (LONG_MIN);
6999 sbitmap visited, in_worklist, in_pending;
7000 basic_block bb;
7001 edge e;
7002 int *bb_order;
7003 int *rc_order;
7004 int i;
7005 int htabsz = 0;
7006 int htabmax = PARAM_VALUE (PARAM_MAX_VARTRACK_SIZE);
7007 bool success = true;
7008
7009 timevar_push (TV_VAR_TRACKING_DATAFLOW);
7010 /* Compute reverse completion order of depth first search of the CFG
7011 so that the data-flow runs faster. */
7012 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
7013 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
7014 pre_and_rev_post_order_compute (NULL, rc_order, false);
7015 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
7016 bb_order[rc_order[i]] = i;
7017 free (rc_order);
7018
7019 visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
7020 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
7021 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
7022 bitmap_clear (in_worklist);
7023
7024 FOR_EACH_BB_FN (bb, cfun)
7025 pending->insert (bb_order[bb->index], bb);
7026 bitmap_ones (in_pending);
7027
7028 while (success && !pending->empty ())
7029 {
7030 std::swap (worklist, pending);
7031 std::swap (in_worklist, in_pending);
7032
7033 bitmap_clear (visited);
7034
7035 while (!worklist->empty ())
7036 {
7037 bb = worklist->extract_min ();
7038 bitmap_clear_bit (in_worklist, bb->index);
7039 gcc_assert (!bitmap_bit_p (visited, bb->index));
7040 if (!bitmap_bit_p (visited, bb->index))
7041 {
7042 bool changed;
7043 edge_iterator ei;
7044 int oldinsz, oldoutsz;
7045
7046 bitmap_set_bit (visited, bb->index);
7047
7048 if (VTI (bb)->in.vars)
7049 {
7050 htabsz
7051 -= shared_hash_htab (VTI (bb)->in.vars)->size ()
7052 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7053 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
7054 oldoutsz
7055 = shared_hash_htab (VTI (bb)->out.vars)->elements ();
7056 }
7057 else
7058 oldinsz = oldoutsz = 0;
7059
7060 if (MAY_HAVE_DEBUG_INSNS)
7061 {
7062 dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
7063 bool first = true, adjust = false;
7064
7065 /* Calculate the IN set as the intersection of
7066 predecessor OUT sets. */
7067
7068 dataflow_set_clear (in);
7069 dst_can_be_shared = true;
7070
7071 FOR_EACH_EDGE (e, ei, bb->preds)
7072 if (!VTI (e->src)->flooded)
7073 gcc_assert (bb_order[bb->index]
7074 <= bb_order[e->src->index]);
7075 else if (first)
7076 {
7077 dataflow_set_copy (in, &VTI (e->src)->out);
7078 first_out = &VTI (e->src)->out;
7079 first = false;
7080 }
7081 else
7082 {
7083 dataflow_set_merge (in, &VTI (e->src)->out);
7084 adjust = true;
7085 }
7086
7087 if (adjust)
7088 {
7089 dataflow_post_merge_adjust (in, &VTI (bb)->permp);
7090 #if ENABLE_CHECKING
7091 /* Merge and merge_adjust should keep entries in
7092 canonical order. */
7093 shared_hash_htab (in->vars)
7094 ->traverse <dataflow_set *,
7095 canonicalize_loc_order_check> (in);
7096 #endif
7097 if (dst_can_be_shared)
7098 {
7099 shared_hash_destroy (in->vars);
7100 in->vars = shared_hash_copy (first_out->vars);
7101 }
7102 }
7103
7104 VTI (bb)->flooded = true;
7105 }
7106 else
7107 {
7108               /* Calculate the IN set as the union of predecessor OUT sets.  */
7109 dataflow_set_clear (&VTI (bb)->in);
7110 FOR_EACH_EDGE (e, ei, bb->preds)
7111 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out);
7112 }
7113
7114 changed = compute_bb_dataflow (bb);
7115 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size ()
7116 + shared_hash_htab (VTI (bb)->out.vars)->size ();
7117
7118 if (htabmax && htabsz > htabmax)
7119 {
7120 if (MAY_HAVE_DEBUG_INSNS)
7121 inform (DECL_SOURCE_LOCATION (cfun->decl),
7122 "variable tracking size limit exceeded with "
7123 "-fvar-tracking-assignments, retrying without");
7124 else
7125 inform (DECL_SOURCE_LOCATION (cfun->decl),
7126 "variable tracking size limit exceeded");
7127 success = false;
7128 break;
7129 }
7130
7131 if (changed)
7132 {
7133 FOR_EACH_EDGE (e, ei, bb->succs)
7134 {
7135 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7136 continue;
7137
7138 if (bitmap_bit_p (visited, e->dest->index))
7139 {
7140 if (!bitmap_bit_p (in_pending, e->dest->index))
7141 {
7142 /* Send E->DEST to next round. */
7143 bitmap_set_bit (in_pending, e->dest->index);
7144 pending->insert (bb_order[e->dest->index],
7145 e->dest);
7146 }
7147 }
7148 else if (!bitmap_bit_p (in_worklist, e->dest->index))
7149 {
7150 /* Add E->DEST to current round. */
7151 bitmap_set_bit (in_worklist, e->dest->index);
7152 worklist->insert (bb_order[e->dest->index],
7153 e->dest);
7154 }
7155 }
7156 }
7157
7158 if (dump_file)
7159 fprintf (dump_file,
7160 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n",
7161 bb->index,
7162 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
7163 oldinsz,
7164 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
7165 oldoutsz,
7166 (int)worklist->nodes (), (int)pending->nodes (),
7167 htabsz);
7168
7169 if (dump_file && (dump_flags & TDF_DETAILS))
7170 {
7171 fprintf (dump_file, "BB %i IN:\n", bb->index);
7172 dump_dataflow_set (&VTI (bb)->in);
7173 fprintf (dump_file, "BB %i OUT:\n", bb->index);
7174 dump_dataflow_set (&VTI (bb)->out);
7175 }
7176 }
7177 }
7178 }
7179
7180 if (success && MAY_HAVE_DEBUG_INSNS)
7181 FOR_EACH_BB_FN (bb, cfun)
7182 gcc_assert (VTI (bb)->flooded);
7183
7184 free (bb_order);
7185 delete worklist;
7186 delete pending;
7187 sbitmap_free (visited);
7188 sbitmap_free (in_worklist);
7189 sbitmap_free (in_pending);
7190
7191 timevar_pop (TV_VAR_TRACKING_DATAFLOW);
7192 return success;
7193 }
7194
7195 /* Print the contents of LIST to the dump file.  */
7196
7197 static void
7198 dump_attrs_list (attrs list)
7199 {
7200 for (; list; list = list->next)
7201 {
7202 if (dv_is_decl_p (list->dv))
7203 print_mem_expr (dump_file, dv_as_decl (list->dv));
7204 else
7205 print_rtl_single (dump_file, dv_as_value (list->dv));
7206 fprintf (dump_file, "+" HOST_WIDE_INT_PRINT_DEC, list->offset);
7207 }
7208 fprintf (dump_file, "\n");
7209 }
7210
7211 /* Print the information about variable *SLOT to dump file. */
7212
7213 int
7214 dump_var_tracking_slot (variable_def **slot, void *data ATTRIBUTE_UNUSED)
7215 {
7216 variable var = *slot;
7217
7218 dump_var (var);
7219
7220 /* Continue traversing the hash table. */
7221 return 1;
7222 }
7223
7224 /* Print the information about variable VAR to dump file. */
7225
7226 static void
7227 dump_var (variable var)
7228 {
7229 int i;
7230 location_chain node;
7231
7232 if (dv_is_decl_p (var->dv))
7233 {
7234 const_tree decl = dv_as_decl (var->dv);
7235
7236 if (DECL_NAME (decl))
7237 {
7238 fprintf (dump_file, " name: %s",
7239 IDENTIFIER_POINTER (DECL_NAME (decl)));
7240 if (dump_flags & TDF_UID)
7241 fprintf (dump_file, "D.%u", DECL_UID (decl));
7242 }
7243 else if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
7244 fprintf (dump_file, " name: D#%u", DEBUG_TEMP_UID (decl));
7245 else
7246 fprintf (dump_file, " name: D.%u", DECL_UID (decl));
7247 fprintf (dump_file, "\n");
7248 }
7249 else
7250 {
7251 fputc (' ', dump_file);
7252 print_rtl_single (dump_file, dv_as_value (var->dv));
7253 }
7254
7255 for (i = 0; i < var->n_var_parts; i++)
7256 {
7257 fprintf (dump_file, " offset %ld\n",
7258 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i)));
7259 for (node = var->var_part[i].loc_chain; node; node = node->next)
7260 {
7261 fprintf (dump_file, " ");
7262 if (node->init == VAR_INIT_STATUS_UNINITIALIZED)
7263 fprintf (dump_file, "[uninit]");
7264 print_rtl_single (dump_file, node->loc);
7265 }
7266 }
7267 }
7268
7269 /* Print the information about variables from hash table VARS to dump file. */
7270
7271 static void
7272 dump_vars (variable_table_type *vars)
7273 {
7274 if (vars->elements () > 0)
7275 {
7276 fprintf (dump_file, "Variables:\n");
7277 vars->traverse <void *, dump_var_tracking_slot> (NULL);
7278 }
7279 }
7280
7281 /* Print the dataflow set SET to dump file. */
7282
7283 static void
7284 dump_dataflow_set (dataflow_set *set)
7285 {
7286 int i;
7287
7288 fprintf (dump_file, "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n",
7289 set->stack_adjust);
7290 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7291 {
7292 if (set->regs[i])
7293 {
7294 fprintf (dump_file, "Reg %d:", i);
7295 dump_attrs_list (set->regs[i]);
7296 }
7297 }
7298 dump_vars (shared_hash_htab (set->vars));
7299 fprintf (dump_file, "\n");
7300 }
7301
7302 /* Print the IN and OUT sets for each basic block to dump file. */
7303
7304 static void
7305 dump_dataflow_sets (void)
7306 {
7307 basic_block bb;
7308
7309 FOR_EACH_BB_FN (bb, cfun)
7310 {
7311 fprintf (dump_file, "\nBasic block %d:\n", bb->index);
7312 fprintf (dump_file, "IN:\n");
7313 dump_dataflow_set (&VTI (bb)->in);
7314 fprintf (dump_file, "OUT:\n");
7315 dump_dataflow_set (&VTI (bb)->out);
7316 }
7317 }
7318
7319 /* Return the variable for DV in dropped_values, inserting one if
7320 requested with INSERT. */
7321
7322 static inline variable
7323 variable_from_dropped (decl_or_value dv, enum insert_option insert)
7324 {
7325 variable_def **slot;
7326 variable empty_var;
7327 onepart_enum_t onepart;
7328
7329 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert);
7330
7331 if (!slot)
7332 return NULL;
7333
7334 if (*slot)
7335 return *slot;
7336
7337 gcc_checking_assert (insert == INSERT);
7338
7339 onepart = dv_onepart_p (dv);
7340
7341 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR);
7342
7343 empty_var = onepart_pool (onepart).allocate ();
7344 empty_var->dv = dv;
7345 empty_var->refcount = 1;
7346 empty_var->n_var_parts = 0;
7347 empty_var->onepart = onepart;
7348 empty_var->in_changed_variables = false;
7349 empty_var->var_part[0].loc_chain = NULL;
7350 empty_var->var_part[0].cur_loc = NULL;
7351 VAR_LOC_1PAUX (empty_var) = NULL;
7352 set_dv_changed (dv, true);
7353
7354 *slot = empty_var;
7355
7356 return empty_var;
7357 }
7358
7359 /* Recover the one-part aux from dropped_values. */
7360
7361 static struct onepart_aux *
7362 recover_dropped_1paux (variable var)
7363 {
7364 variable dvar;
7365
7366 gcc_checking_assert (var->onepart);
7367
7368 if (VAR_LOC_1PAUX (var))
7369 return VAR_LOC_1PAUX (var);
7370
7371 if (var->onepart == ONEPART_VDECL)
7372 return NULL;
7373
7374 dvar = variable_from_dropped (var->dv, NO_INSERT);
7375
7376 if (!dvar)
7377 return NULL;
7378
7379 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar);
7380 VAR_LOC_1PAUX (dvar) = NULL;
7381
7382 return VAR_LOC_1PAUX (var);
7383 }
7384
7385 /* Add variable VAR to the hash table of changed variables and
7386 if it has no locations delete it from SET's hash table. */
7387
7388 static void
7389 variable_was_changed (variable var, dataflow_set *set)
7390 {
7391 hashval_t hash = dv_htab_hash (var->dv);
7392
7393 if (emit_notes)
7394 {
7395 variable_def **slot;
7396
7397 /* Remember this decl or VALUE has been added to changed_variables. */
7398 set_dv_changed (var->dv, true);
7399
7400 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT);
7401
7402 if (*slot)
7403 {
7404 variable old_var = *slot;
7405 gcc_assert (old_var->in_changed_variables);
7406 old_var->in_changed_variables = false;
7407 if (var != old_var && var->onepart)
7408 {
7409 /* Restore the auxiliary info from an empty variable
7410 previously created for changed_variables, so it is
7411 not lost. */
7412 gcc_checking_assert (!VAR_LOC_1PAUX (var));
7413 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
7414 VAR_LOC_1PAUX (old_var) = NULL;
7415 }
7416 variable_htab_free (*slot);
7417 }
7418
7419 if (set && var->n_var_parts == 0)
7420 {
7421 onepart_enum_t onepart = var->onepart;
7422 variable empty_var = NULL;
7423 variable_def **dslot = NULL;
7424
7425 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
7426 {
7427 dslot = dropped_values->find_slot_with_hash (var->dv,
7428 dv_htab_hash (var->dv),
7429 INSERT);
7430 empty_var = *dslot;
7431
7432 if (empty_var)
7433 {
7434 gcc_checking_assert (!empty_var->in_changed_variables);
7435 if (!VAR_LOC_1PAUX (var))
7436 {
7437 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
7438 VAR_LOC_1PAUX (empty_var) = NULL;
7439 }
7440 else
7441 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
7442 }
7443 }
7444
7445 if (!empty_var)
7446 {
7447 empty_var = onepart_pool (onepart).allocate ();
7448 empty_var->dv = var->dv;
7449 empty_var->refcount = 1;
7450 empty_var->n_var_parts = 0;
7451 empty_var->onepart = onepart;
7452 if (dslot)
7453 {
7454 empty_var->refcount++;
7455 *dslot = empty_var;
7456 }
7457 }
7458 else
7459 empty_var->refcount++;
7460 empty_var->in_changed_variables = true;
7461 *slot = empty_var;
7462 if (onepart)
7463 {
7464 empty_var->var_part[0].loc_chain = NULL;
7465 empty_var->var_part[0].cur_loc = NULL;
7466 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
7467 VAR_LOC_1PAUX (var) = NULL;
7468 }
7469 goto drop_var;
7470 }
7471 else
7472 {
7473 if (var->onepart && !VAR_LOC_1PAUX (var))
7474 recover_dropped_1paux (var);
7475 var->refcount++;
7476 var->in_changed_variables = true;
7477 *slot = var;
7478 }
7479 }
7480 else
7481 {
7482 gcc_assert (set);
7483 if (var->n_var_parts == 0)
7484 {
7485 variable_def **slot;
7486
7487 drop_var:
7488 slot = shared_hash_find_slot_noinsert (set->vars, var->dv);
7489 if (slot)
7490 {
7491 if (shared_hash_shared (set->vars))
7492 slot = shared_hash_find_slot_unshare (&set->vars, var->dv,
7493 NO_INSERT);
7494 shared_hash_htab (set->vars)->clear_slot (slot);
7495 }
7496 }
7497 }
7498 }
7499
7500 /* Look for the index in VAR->var_part corresponding to OFFSET.
7501    Return -1 if not found.  If INSERTION_POINT is non-NULL, the
7502    referenced int is set to the index the part has, or to the index
7503    at which it would have to be inserted.  */
7504
7505 static inline int
7506 find_variable_location_part (variable var, HOST_WIDE_INT offset,
7507 int *insertion_point)
7508 {
7509 int pos, low, high;
7510
7511 if (var->onepart)
7512 {
7513 if (offset != 0)
7514 return -1;
7515
7516 if (insertion_point)
7517 *insertion_point = 0;
7518
7519 return var->n_var_parts - 1;
7520 }
7521
7522 /* Find the location part. */
7523 low = 0;
7524 high = var->n_var_parts;
7525 while (low != high)
7526 {
7527 pos = (low + high) / 2;
7528 if (VAR_PART_OFFSET (var, pos) < offset)
7529 low = pos + 1;
7530 else
7531 high = pos;
7532 }
7533 pos = low;
7534
7535 if (insertion_point)
7536 *insertion_point = pos;
7537
7538 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset)
7539 return pos;
7540
7541 return -1;
7542 }
7543
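/* Example: for a multi-part VAR with parts at offsets {0, 4, 8},
   looking up OFFSET 4 returns index 1, while looking up OFFSET 6
   returns -1 and, if requested, sets *INSERTION_POINT to 2, the
   index at which a part for offset 6 would have to be inserted.  */
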
7544 static variable_def **
7545 set_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7546 decl_or_value dv, HOST_WIDE_INT offset,
7547 enum var_init_status initialized, rtx set_src)
7548 {
7549 int pos;
7550 location_chain node, next;
7551 location_chain *nextp;
7552 variable var;
7553 onepart_enum_t onepart;
7554
7555 var = *slot;
7556
7557 if (var)
7558 onepart = var->onepart;
7559 else
7560 onepart = dv_onepart_p (dv);
7561
7562 gcc_checking_assert (offset == 0 || !onepart);
7563 gcc_checking_assert (loc != dv_as_opaque (dv));
7564
7565 if (! flag_var_tracking_uninit)
7566 initialized = VAR_INIT_STATUS_INITIALIZED;
7567
7568 if (!var)
7569 {
7570 /* Create new variable information. */
7571 var = onepart_pool (onepart).allocate ();
7572 var->dv = dv;
7573 var->refcount = 1;
7574 var->n_var_parts = 1;
7575 var->onepart = onepart;
7576 var->in_changed_variables = false;
7577 if (var->onepart)
7578 VAR_LOC_1PAUX (var) = NULL;
7579 else
7580 VAR_PART_OFFSET (var, 0) = offset;
7581 var->var_part[0].loc_chain = NULL;
7582 var->var_part[0].cur_loc = NULL;
7583 *slot = var;
7584 pos = 0;
7585 nextp = &var->var_part[0].loc_chain;
7586 }
7587 else if (onepart)
7588 {
7589 int r = -1, c = 0;
7590
7591 gcc_assert (dv_as_opaque (var->dv) == dv_as_opaque (dv));
7592
7593 pos = 0;
7594
7595 if (GET_CODE (loc) == VALUE)
7596 {
7597 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7598 nextp = &node->next)
7599 if (GET_CODE (node->loc) == VALUE)
7600 {
7601 if (node->loc == loc)
7602 {
7603 r = 0;
7604 break;
7605 }
7606 if (canon_value_cmp (node->loc, loc))
7607 c++;
7608 else
7609 {
7610 r = 1;
7611 break;
7612 }
7613 }
7614 else if (REG_P (node->loc) || MEM_P (node->loc))
7615 c++;
7616 else
7617 {
7618 r = 1;
7619 break;
7620 }
7621 }
7622 else if (REG_P (loc))
7623 {
7624 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7625 nextp = &node->next)
7626 if (REG_P (node->loc))
7627 {
7628 if (REGNO (node->loc) < REGNO (loc))
7629 c++;
7630 else
7631 {
7632 if (REGNO (node->loc) == REGNO (loc))
7633 r = 0;
7634 else
7635 r = 1;
7636 break;
7637 }
7638 }
7639 else
7640 {
7641 r = 1;
7642 break;
7643 }
7644 }
7645 else if (MEM_P (loc))
7646 {
7647 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7648 nextp = &node->next)
7649 if (REG_P (node->loc))
7650 c++;
7651 else if (MEM_P (node->loc))
7652 {
7653 if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0)
7654 break;
7655 else
7656 c++;
7657 }
7658 else
7659 {
7660 r = 1;
7661 break;
7662 }
7663 }
7664 else
7665 for (nextp = &var->var_part[0].loc_chain; (node = *nextp);
7666 nextp = &node->next)
7667 if ((r = loc_cmp (node->loc, loc)) >= 0)
7668 break;
7669 else
7670 c++;
7671
7672 if (r == 0)
7673 return slot;
7674
7675 if (shared_var_p (var, set->vars))
7676 {
7677 slot = unshare_variable (set, slot, var, initialized);
7678 var = *slot;
7679 for (nextp = &var->var_part[0].loc_chain; c;
7680 nextp = &(*nextp)->next)
7681 c--;
7682 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc);
7683 }
7684 }
7685 else
7686 {
7687 int inspos = 0;
7688
7689 gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv));
7690
7691 pos = find_variable_location_part (var, offset, &inspos);
7692
7693 if (pos >= 0)
7694 {
7695 node = var->var_part[pos].loc_chain;
7696
7697 if (node
7698 && ((REG_P (node->loc) && REG_P (loc)
7699 && REGNO (node->loc) == REGNO (loc))
7700 || rtx_equal_p (node->loc, loc)))
7701 {
7702           /* LOC is at the beginning of the chain, so we have nothing
7703              to do.  */
7704 if (node->init < initialized)
7705 node->init = initialized;
7706 if (set_src != NULL)
7707 node->set_src = set_src;
7708
7709 return slot;
7710 }
7711 else
7712 {
7713 /* We have to make a copy of a shared variable. */
7714 if (shared_var_p (var, set->vars))
7715 {
7716 slot = unshare_variable (set, slot, var, initialized);
7717 var = *slot;
7718 }
7719 }
7720 }
7721 else
7722 {
7723       /* We have not found the location part, so a new one will be created.  */
7724
7725 /* We have to make a copy of the shared variable. */
7726 if (shared_var_p (var, set->vars))
7727 {
7728 slot = unshare_variable (set, slot, var, initialized);
7729 var = *slot;
7730 }
7731
7732       /* We track only variables whose size is <= MAX_VAR_PARTS bytes,
7733          so there are at most MAX_VAR_PARTS different offsets.  */
7734 gcc_assert (var->n_var_parts < MAX_VAR_PARTS
7735 && (!var->n_var_parts || !onepart));
7736
7737       /* We have to move the elements of the array starting at index
7738          inspos to the next position.  */
7739 for (pos = var->n_var_parts; pos > inspos; pos--)
7740 var->var_part[pos] = var->var_part[pos - 1];
7741
7742 var->n_var_parts++;
7743 gcc_checking_assert (!onepart);
7744 VAR_PART_OFFSET (var, pos) = offset;
7745 var->var_part[pos].loc_chain = NULL;
7746 var->var_part[pos].cur_loc = NULL;
7747 }
7748
7749 /* Delete the location from the list. */
7750 nextp = &var->var_part[pos].loc_chain;
7751 for (node = var->var_part[pos].loc_chain; node; node = next)
7752 {
7753 next = node->next;
7754 if ((REG_P (node->loc) && REG_P (loc)
7755 && REGNO (node->loc) == REGNO (loc))
7756 || rtx_equal_p (node->loc, loc))
7757 {
7758 /* Save these values, to assign to the new node, before
7759 deleting this one. */
7760 if (node->init > initialized)
7761 initialized = node->init;
7762 if (node->set_src != NULL && set_src == NULL)
7763 set_src = node->set_src;
7764 if (var->var_part[pos].cur_loc == node->loc)
7765 var->var_part[pos].cur_loc = NULL;
7766 delete node;
7767 *nextp = next;
7768 break;
7769 }
7770 else
7771 nextp = &node->next;
7772 }
7773
7774 nextp = &var->var_part[pos].loc_chain;
7775 }
7776
7777 /* Add the location to the beginning. */
7778 node = new location_chain_def;
7779 node->loc = loc;
7780 node->init = initialized;
7781 node->set_src = set_src;
7782 node->next = *nextp;
7783 *nextp = node;
7784
7785   /* If no location note was emitted for this part yet, record the change.  */
7786 if (var->var_part[pos].cur_loc == NULL)
7787 variable_was_changed (var, set);
7788
7789 return slot;
7790 }
7791
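/* Sketch of the canonical loc chain order the code above maintains
   for one-part variables (illustrative): registers first in ascending
   REGNO order, then MEMs ordered by loc_cmp on their addresses, then
   VALUEs in canon_value_cmp order, then any remaining expressions in
   loc_cmp order, e.g.

     (reg 3) -> (reg 7) -> (mem A) -> (mem B) -> V1 -> V2  */
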
7792 /* Set the part of variable's location in the dataflow set SET. The
7793 variable part is specified by variable's declaration in DV and
7794 offset OFFSET and the part's location by LOC. IOPT should be
7795 NO_INSERT if the variable is known to be in SET already and the
7796 variable hash table must not be resized, and INSERT otherwise. */
7797
7798 static void
7799 set_variable_part (dataflow_set *set, rtx loc,
7800 decl_or_value dv, HOST_WIDE_INT offset,
7801 enum var_init_status initialized, rtx set_src,
7802 enum insert_option iopt)
7803 {
7804 variable_def **slot;
7805
7806 if (iopt == NO_INSERT)
7807 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7808 else
7809 {
7810 slot = shared_hash_find_slot (set->vars, dv);
7811 if (!slot)
7812 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt);
7813 }
7814 set_slot_part (set, loc, slot, dv, offset, initialized, set_src);
7815 }
7816
7817 /* Remove all recorded register locations for the given variable part
7818    from dataflow set SET, except for those that are identical to LOC.
7819    The variable part is specified by its SET->vars slot SLOT and
7820    offset OFFSET.  */
7821
7822 static variable_def **
7823 clobber_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7824 HOST_WIDE_INT offset, rtx set_src)
7825 {
7826 variable var = *slot;
7827 int pos = find_variable_location_part (var, offset, NULL);
7828
7829 if (pos >= 0)
7830 {
7831 location_chain node, next;
7832
7833 /* Remove the register locations from the dataflow set. */
7834 next = var->var_part[pos].loc_chain;
7835 for (node = next; node; node = next)
7836 {
7837 next = node->next;
7838 if (node->loc != loc
7839 && (!flag_var_tracking_uninit
7840 || !set_src
7841 || MEM_P (set_src)
7842 || !rtx_equal_p (set_src, node->set_src)))
7843 {
7844 if (REG_P (node->loc))
7845 {
7846 attrs anode, anext;
7847 attrs *anextp;
7848
7849 /* Remove the variable part from the register's
7850 list, but preserve any other variable parts
7851 that might be regarded as live in that same
7852 register. */
7853 anextp = &set->regs[REGNO (node->loc)];
7854 for (anode = *anextp; anode; anode = anext)
7855 {
7856 anext = anode->next;
7857 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv)
7858 && anode->offset == offset)
7859 {
7860 delete anode;
7861 *anextp = anext;
7862 }
7863 else
7864 anextp = &anode->next;
7865 }
7866 }
7867
7868 slot = delete_slot_part (set, node->loc, slot, offset);
7869 }
7870 }
7871 }
7872
7873 return slot;
7874 }
7875
7876 /* Remove all recorded register locations for the given variable part
7877    from dataflow set SET, except for those that are identical to LOC.
7878    The variable part is specified by the variable's declaration or
7879    value DV and offset OFFSET.  */
7880
7881 static void
7882 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7883 HOST_WIDE_INT offset, rtx set_src)
7884 {
7885 variable_def **slot;
7886
7887 if (!dv_as_opaque (dv)
7888 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv))))
7889 return;
7890
7891 slot = shared_hash_find_slot_noinsert (set->vars, dv);
7892 if (!slot)
7893 return;
7894
7895 clobber_slot_part (set, loc, slot, offset, set_src);
7896 }
7897
7898 /* Delete the part of variable's location from dataflow set SET. The
7899 variable part is specified by its SET->vars slot SLOT and offset
7900 OFFSET and the part's location by LOC. */
7901
7902 static variable_def **
7903 delete_slot_part (dataflow_set *set, rtx loc, variable_def **slot,
7904 HOST_WIDE_INT offset)
7905 {
7906 variable var = *slot;
7907 int pos = find_variable_location_part (var, offset, NULL);
7908
7909 if (pos >= 0)
7910 {
7911 location_chain node, next;
7912 location_chain *nextp;
7913 bool changed;
7914 rtx cur_loc;
7915
7916 if (shared_var_p (var, set->vars))
7917 {
7918           /* If the variable's location chain contains LOC, we have to
7919              make a copy of the variable before deleting from it.  */
7920 for (node = var->var_part[pos].loc_chain; node;
7921 node = node->next)
7922 {
7923 if ((REG_P (node->loc) && REG_P (loc)
7924 && REGNO (node->loc) == REGNO (loc))
7925 || rtx_equal_p (node->loc, loc))
7926 {
7927 slot = unshare_variable (set, slot, var,
7928 VAR_INIT_STATUS_UNKNOWN);
7929 var = *slot;
7930 break;
7931 }
7932 }
7933 }
7934
7935 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7936 cur_loc = VAR_LOC_FROM (var);
7937 else
7938 cur_loc = var->var_part[pos].cur_loc;
7939
7940 /* Delete the location part. */
7941 changed = false;
7942 nextp = &var->var_part[pos].loc_chain;
7943 for (node = *nextp; node; node = next)
7944 {
7945 next = node->next;
7946 if ((REG_P (node->loc) && REG_P (loc)
7947 && REGNO (node->loc) == REGNO (loc))
7948 || rtx_equal_p (node->loc, loc))
7949 {
7950               /* If we have deleted the location which was last emitted,
7951                  we have to emit a new location, so add the variable to the
7952                  set of changed variables.  */
7953 if (cur_loc == node->loc)
7954 {
7955 changed = true;
7956 var->var_part[pos].cur_loc = NULL;
7957 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
7958 VAR_LOC_FROM (var) = NULL;
7959 }
7960 delete node;
7961 *nextp = next;
7962 break;
7963 }
7964 else
7965 nextp = &node->next;
7966 }
7967
7968 if (var->var_part[pos].loc_chain == NULL)
7969 {
7970 changed = true;
7971 var->n_var_parts--;
7972 while (pos < var->n_var_parts)
7973 {
7974 var->var_part[pos] = var->var_part[pos + 1];
7975 pos++;
7976 }
7977 }
7978 if (changed)
7979 variable_was_changed (var, set);
7980 }
7981
7982 return slot;
7983 }
7984
7985 /* Delete the part of variable's location from dataflow set SET. The
7986 variable part is specified by variable's declaration or value DV
7987 and offset OFFSET and the part's location by LOC. */
7988
7989 static void
7990 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
7991 HOST_WIDE_INT offset)
7992 {
7993 variable_def **slot = shared_hash_find_slot_noinsert (set->vars, dv);
7994 if (!slot)
7995 return;
7996
7997 delete_slot_part (set, loc, slot, offset);
7998 }
7999
8000
8001 /* Structure for passing some other parameters to function
8002 vt_expand_loc_callback. */
8003 struct expand_loc_callback_data
8004 {
8005 /* The variables and values active at this point. */
8006 variable_table_type *vars;
8007
8008 /* Stack of values and debug_exprs under expansion, and their
8009 children. */
8010 auto_vec<rtx, 4> expanding;
8011
8012 /* Stack of values and debug_exprs whose expansion hit recursion
8013 cycles. They will have VALUE_RECURSED_INTO marked when added to
8014 this list. This flag will be cleared if any of its dependencies
8015 resolves to a valid location. So, if the flag remains set at the
8016 end of the search, we know no valid location for this one can
8017 possibly exist. */
8018 auto_vec<rtx, 4> pending;
8019
8020 /* The maximum depth among the sub-expressions under expansion.
8021 Zero indicates no expansion so far. */
8022 expand_depth depth;
8023 };
8024
8025 /* Allocate the one-part auxiliary data structure for VAR, with enough
8026 room for COUNT dependencies. */
8027
8028 static void
8029 loc_exp_dep_alloc (variable var, int count)
8030 {
8031 size_t allocsize;
8032
8033 gcc_checking_assert (var->onepart);
8034
8035 /* We can be called with COUNT == 0 to allocate the data structure
8036 without any dependencies, e.g. for the backlinks only. However,
8037 if we are specifying a COUNT, then the dependency list must have
8038 been emptied before. It would be possible to adjust pointers or
8039 force it empty here, but this is better done at an earlier point
8040 in the algorithm, so we instead leave an assertion to catch
8041 errors. */
8042 gcc_checking_assert (!count
8043 || VAR_LOC_DEP_VEC (var) == NULL
8044 || VAR_LOC_DEP_VEC (var)->is_empty ());
8045
8046 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
8047 return;
8048
8049 allocsize = offsetof (struct onepart_aux, deps)
8050 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
8051
8052 if (VAR_LOC_1PAUX (var))
8053 {
8054 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
8055 VAR_LOC_1PAUX (var), allocsize);
8056 /* If the reallocation moves the onepaux structure, the
8057 back-pointer to BACKLINKS in the first list member will still
8058 point to its old location. Adjust it. */
8059 if (VAR_LOC_DEP_LST (var))
8060 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
8061 }
8062 else
8063 {
8064 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
8065 *VAR_LOC_DEP_LSTP (var) = NULL;
8066 VAR_LOC_FROM (var) = NULL;
8067 VAR_LOC_DEPTH (var).complexity = 0;
8068 VAR_LOC_DEPTH (var).entryvals = 0;
8069 }
8070 VAR_LOC_DEP_VEC (var)->embedded_init (count);
8071 }
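/* Illustrative sketch (not part of the pass): the allocation above
   lays out the onepart_aux header and the dependency vector in one
   heap block, sized via offsetof plus the embedded vector size.  The
   underlying idiom, with hypothetical HDR and ITEM types:

     #include <stddef.h>
     #include <stdlib.h>
     #include <string.h>

     struct item { int v; };
     struct hdr { size_t n; struct item deps[]; };  // flexible array member

     static struct hdr *
     hdr_alloc (size_t count)
     {
       size_t sz = offsetof (struct hdr, deps) + count * sizeof (struct item);
       struct hdr *h = (struct hdr *) malloc (sz);
       if (h)
         {
           memset (h, 0, sz);       // zero the header and embedded array
           h->n = count;
         }
       return h;
     }

   The extra wrinkle in loc_exp_dep_alloc is that when XRESIZEVAR moves
   the block, the external pointer into it (the first backlink's pprev)
   must be re-pointed at the structure's new address.  */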
8072
8073 /* Remove all entries from the vector of active dependencies of VAR,
8074 removing them from the back-links lists too. */
8075
8076 static void
8077 loc_exp_dep_clear (variable var)
8078 {
8079 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
8080 {
8081 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
8082 if (led->next)
8083 led->next->pprev = led->pprev;
8084 if (led->pprev)
8085 *led->pprev = led->next;
8086 VAR_LOC_DEP_VEC (var)->pop ();
8087 }
8088 }
8089
8090 /* Insert an active dependency from VAR on X to the vector of
8091 dependencies, and add the corresponding back-link to X's list of
8092 back-links in VARS. */
8093
8094 static void
8095 loc_exp_insert_dep (variable var, rtx x, variable_table_type *vars)
8096 {
8097 decl_or_value dv;
8098 variable xvar;
8099 loc_exp_dep *led;
8100
8101 dv = dv_from_rtx (x);
8102
8103 /* ??? Build a vector of variables parallel to EXPANDING, to avoid
8104 an additional lookup? */
8105 xvar = vars->find_with_hash (dv, dv_htab_hash (dv));
8106
8107 if (!xvar)
8108 {
8109 xvar = variable_from_dropped (dv, NO_INSERT);
8110 gcc_checking_assert (xvar);
8111 }
8112
8113 /* No point in adding the same backlink more than once. This may
8114 arise if, say, the same value appears in two complex expressions in
8115 the same loc_list, or even more than once in a single
8116 expression. */
8117 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
8118 return;
8119
8120 if (var->onepart == NOT_ONEPART)
8121 led = new loc_exp_dep;
8122 else
8123 {
8124 loc_exp_dep empty;
8125 memset (&empty, 0, sizeof (empty));
8126 VAR_LOC_DEP_VEC (var)->quick_push (empty);
8127 led = &VAR_LOC_DEP_VEC (var)->last ();
8128 }
8129 led->dv = var->dv;
8130 led->value = x;
8131
8132 loc_exp_dep_alloc (xvar, 0);
8133 led->pprev = VAR_LOC_DEP_LSTP (xvar);
8134 led->next = *led->pprev;
8135 if (led->next)
8136 led->next->pprev = &led->next;
8137 *led->pprev = led;
8138 }
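/* Illustrative sketch (not part of the pass): the backlink lists use
   the PPREV representation, where each node stores the address of
   whichever pointer currently points at it (the list head or the
   previous node's NEXT field), so insertion and removal need no
   special case for the head.  A standalone version with a
   hypothetical LINK type:

     #include <stddef.h>

     struct link { struct link *next, **pprev; };

     // Insert L at the front of the list whose head pointer is *HEADP.
     static void
     link_insert (struct link **headp, struct link *l)
     {
       l->pprev = headp;
       l->next = *headp;
       if (l->next)
         l->next->pprev = &l->next;
       *headp = l;
     }

     // Unlink L from whatever list it is currently on.
     static void
     link_remove (struct link *l)
     {
       if (l->next)
         l->next->pprev = l->pprev;
       *l->pprev = l->next;
       l->next = NULL;
       l->pprev = NULL;
     }

   loc_exp_insert_dep performs exactly the insertion half, with the
   head pointer living in the target variable's onepart_aux.  */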
8139
8140 /* Create active dependencies of VAR on COUNT values starting at
8141 VALUE, and corresponding back-links to the entries in VARS. Return
8142 true if we found any pending-recursion results. */
8143
8144 static bool
8145 loc_exp_dep_set (variable var, rtx result, rtx *value, int count,
8146 variable_table_type *vars)
8147 {
8148 bool pending_recursion = false;
8149
8150 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
8151 || VAR_LOC_DEP_VEC (var)->is_empty ());
8152
8153 /* Allocate room for all the dependencies, then register each of
8154 the COUNT values in turn. */
8155 loc_exp_dep_alloc (var, count);
8156
8157 while (count--)
8158 {
8159 rtx x = *value++;
8160
8161 if (!pending_recursion)
8162 pending_recursion = !result && VALUE_RECURSED_INTO (x);
8163
8164 loc_exp_insert_dep (var, x, vars);
8165 }
8166
8167 return pending_recursion;
8168 }
8169
8170 /* Notify the back-links of IVAR that are pending recursion that we
8171 have found a non-NIL value for it, so they are cleared for another
8172 attempt to compute a current location. */
8173
8174 static void
8175 notify_dependents_of_resolved_value (variable ivar, variable_table_type *vars)
8176 {
8177 loc_exp_dep *led, *next;
8178
8179 for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
8180 {
8181 decl_or_value dv = led->dv;
8182 variable var;
8183
8184 next = led->next;
8185
8186 if (dv_is_value_p (dv))
8187 {
8188 rtx value = dv_as_value (dv);
8189
8190 /* If we have already resolved it, leave it alone. */
8191 if (!VALUE_RECURSED_INTO (value))
8192 continue;
8193
8194 /* Check that VALUE_RECURSED_INTO, true from the test above,
8195 implies NO_LOC_P. */
8196 gcc_checking_assert (NO_LOC_P (value));
8197
8198 /* We won't notify variables that are being expanded,
8199 because their dependency list is cleared before
8200 recursing. */
8201 NO_LOC_P (value) = false;
8202 VALUE_RECURSED_INTO (value) = false;
8203
8204 gcc_checking_assert (dv_changed_p (dv));
8205 }
8206 else
8207 {
8208 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
8209 if (!dv_changed_p (dv))
8210 continue;
8211 }
8212
8213 var = vars->find_with_hash (dv, dv_htab_hash (dv));
8214
8215 if (!var)
8216 var = variable_from_dropped (dv, NO_INSERT);
8217
8218 if (var)
8219 notify_dependents_of_resolved_value (var, vars);
8220
8221 if (next)
8222 next->pprev = led->pprev;
8223 if (led->pprev)
8224 *led->pprev = next;
8225 led->next = NULL;
8226 led->pprev = NULL;
8227 }
8228 }
8229
8230 static rtx vt_expand_loc_callback (rtx x, bitmap regs,
8231 int max_depth, void *data);
8232
8233 /* Return the combined depth, when one sub-expression evaluated to
8234 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */
8235
8236 static inline expand_depth
8237 update_depth (expand_depth saved_depth, expand_depth best_depth)
8238 {
8239 /* If we didn't find anything, stick with what we had. */
8240 if (!best_depth.complexity)
8241 return saved_depth;
8242
8243 /* If we hadn't found anything before, use the depth of the current
8244 expression. Do NOT add one extra level: we want to compute the
8245 maximum depth among sub-expressions. We'll increment it later,
8246 if appropriate. */
8247 if (!saved_depth.complexity)
8248 return best_depth;
8249
8250 /* Combine the entryval count so that regardless of which one we
8251 return, the entryval count is accurate. */
8252 best_depth.entryvals = saved_depth.entryvals
8253 = best_depth.entryvals + saved_depth.entryvals;
8254
8255 if (saved_depth.complexity < best_depth.complexity)
8256 return best_depth;
8257 else
8258 return saved_depth;
8259 }
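/* For example (hypothetical numbers): combining SAVED_DEPTH
   { complexity = 2, entryvals = 1 } with BEST_DEPTH
   { complexity = 3, entryvals = 0 } first sums the entryval counts
   into both operands and then returns the deeper one, yielding
   { complexity = 3, entryvals = 1 }.  */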
8260
8261 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and
8262 DATA for cselib expand callback. If PENDRECP is given, indicate in
8263 it whether any sub-expression couldn't be fully evaluated because
8264 it is pending recursion resolution. */
8265
8266 static inline rtx
8267 vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
8268 {
8269 struct expand_loc_callback_data *elcd
8270 = (struct expand_loc_callback_data *) data;
8271 location_chain loc, next;
8272 rtx result = NULL;
8273 int first_child, result_first_child, last_child;
8274 bool pending_recursion;
8275 rtx loc_from = NULL;
8276 struct elt_loc_list *cloc = NULL;
8277 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth;
8278 int wanted_entryvals, found_entryvals = 0;
8279
8280 /* Clear all backlinks pointing at this, so that we're not notified
8281 while we're active. */
8282 loc_exp_dep_clear (var);
8283
8284 retry:
8285 if (var->onepart == ONEPART_VALUE)
8286 {
8287 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));
8288
8289 gcc_checking_assert (cselib_preserved_value_p (val));
8290
8291 cloc = val->locs;
8292 }
8293
8294 first_child = result_first_child = last_child
8295 = elcd->expanding.length ();
8296
8297 wanted_entryvals = found_entryvals;
8298
8299 /* Attempt to expand each available location in turn. */
8300 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
8301 loc || cloc; loc = next)
8302 {
8303 result_first_child = last_child;
8304
8305 if (!loc)
8306 {
8307 loc_from = cloc->loc;
8308 next = loc;
8309 cloc = cloc->next;
8310 if (unsuitable_loc (loc_from))
8311 continue;
8312 }
8313 else
8314 {
8315 loc_from = loc->loc;
8316 next = loc->next;
8317 }
8318
8319 gcc_checking_assert (!unsuitable_loc (loc_from));
8320
8321 elcd->depth.complexity = elcd->depth.entryvals = 0;
8322 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
8323 vt_expand_loc_callback, data);
8324 last_child = elcd->expanding.length ();
8325
8326 if (result)
8327 {
8328 depth = elcd->depth;
8329
8330 gcc_checking_assert (depth.complexity
8331 || result_first_child == last_child);
8332
8333 if (last_child - result_first_child != 1)
8334 {
8335 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
8336 depth.entryvals++;
8337 depth.complexity++;
8338 }
8339
8340 if (depth.complexity <= EXPR_USE_DEPTH)
8341 {
8342 if (depth.entryvals <= wanted_entryvals)
8343 break;
8344 else if (!found_entryvals || depth.entryvals < found_entryvals)
8345 found_entryvals = depth.entryvals;
8346 }
8347
8348 result = NULL;
8349 }
8350
8351 /* Set it up in case we leave the loop. */
8352 depth.complexity = depth.entryvals = 0;
8353 loc_from = NULL;
8354 result_first_child = first_child;
8355 }
8356
8357 if (!loc_from && wanted_entryvals < found_entryvals)
8358 {
8359 /* We found entries with ENTRY_VALUEs and skipped them. Since
8360 we could not find any expansions without ENTRY_VALUEs, but we
8361 found at least one with them, go back and get an entry with
8362 the minimum ENTRY_VALUE count that we found. We could
8363 avoid looping, but since each sub-loc is already resolved,
8364 the re-expansion should be trivial. ??? Should we record all
8365 attempted locs as dependencies, so that we retry the
8366 expansion should any of them change, in the hope it can give
8367 us a new entry without an ENTRY_VALUE? */
8368 elcd->expanding.truncate (first_child);
8369 goto retry;
8370 }
8371
8372 /* Register all encountered dependencies as active. */
8373 pending_recursion = loc_exp_dep_set
8374 (var, result, elcd->expanding.address () + result_first_child,
8375 last_child - result_first_child, elcd->vars);
8376
8377 elcd->expanding.truncate (first_child);
8378
8379 /* Record where the expansion came from. */
8380 gcc_checking_assert (!result || !pending_recursion);
8381 VAR_LOC_FROM (var) = loc_from;
8382 VAR_LOC_DEPTH (var) = depth;
8383
8384 gcc_checking_assert (!depth.complexity == !result);
8385
8386 elcd->depth = update_depth (saved_depth, depth);
8387
8388 /* Indicate whether any of the dependencies are pending recursion
8389 resolution. */
8390 if (pendrecp)
8391 *pendrecp = pending_recursion;
8392
8393 if (!pendrecp || !pending_recursion)
8394 var->var_part[0].cur_loc = result;
8395
8396 return result;
8397 }
8398
8399 /* Callback for cselib_expand_value that looks for expressions
8400 holding the value in the var-tracking hash tables. Return X for
8401 standard processing; anything else is to be used as-is. */
8402
8403 static rtx
8404 vt_expand_loc_callback (rtx x, bitmap regs,
8405 int max_depth ATTRIBUTE_UNUSED,
8406 void *data)
8407 {
8408 struct expand_loc_callback_data *elcd
8409 = (struct expand_loc_callback_data *) data;
8410 decl_or_value dv;
8411 variable var;
8412 rtx result, subreg;
8413 bool pending_recursion = false;
8414 bool from_empty = false;
8415
8416 switch (GET_CODE (x))
8417 {
8418 case SUBREG:
8419 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
8420 EXPR_DEPTH,
8421 vt_expand_loc_callback, data);
8422
8423 if (!subreg)
8424 return NULL;
8425
8426 result = simplify_gen_subreg (GET_MODE (x), subreg,
8427 GET_MODE (SUBREG_REG (x)),
8428 SUBREG_BYTE (x));
8429
8430 /* Invalid SUBREGs are ok in debug info. ??? We could try
8431 alternate expansions for the VALUE as well. */
8432 if (!result)
8433 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));
8434
8435 return result;
8436
8437 case DEBUG_EXPR:
8438 case VALUE:
8439 dv = dv_from_rtx (x);
8440 break;
8441
8442 default:
8443 return x;
8444 }
8445
8446 elcd->expanding.safe_push (x);
8447
8448 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
8449 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
8450
8451 if (NO_LOC_P (x))
8452 {
8453 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
8454 return NULL;
8455 }
8456
8457 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv));
8458
8459 if (!var)
8460 {
8461 from_empty = true;
8462 var = variable_from_dropped (dv, INSERT);
8463 }
8464
8465 gcc_checking_assert (var);
8466
8467 if (!dv_changed_p (dv))
8468 {
8469 gcc_checking_assert (!NO_LOC_P (x));
8470 gcc_checking_assert (var->var_part[0].cur_loc);
8471 gcc_checking_assert (VAR_LOC_1PAUX (var));
8472 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);
8473
8474 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth);
8475
8476 return var->var_part[0].cur_loc;
8477 }
8478
8479 VALUE_RECURSED_INTO (x) = true;
8480 /* This is tentative, but it makes some tests simpler. */
8481 NO_LOC_P (x) = true;
8482
8483 gcc_checking_assert (var->n_var_parts == 1 || from_empty);
8484
8485 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion);
8486
8487 if (pending_recursion)
8488 {
8489 gcc_checking_assert (!result);
8490 elcd->pending.safe_push (x);
8491 }
8492 else
8493 {
8494 NO_LOC_P (x) = !result;
8495 VALUE_RECURSED_INTO (x) = false;
8496 set_dv_changed (dv, false);
8497
8498 if (result)
8499 notify_dependents_of_resolved_value (var, elcd->vars);
8500 }
8501
8502 return result;
8503 }
8504
8505 /* While expanding variables, we may encounter recursion cycles
8506 because of mutual (possibly indirect) dependencies between two
8507 particular variables (or values), say A and B. If we're trying to
8508 expand A when we get to B, which in turn attempts to expand A, if
8509 we can't find any other expansion for B, we'll add B to this
8510 pending-recursion stack, and tentatively return NULL for its
8511 location. This tentative value will be used for any other
8512 occurrences of B, unless A gets some other location, in which case
8513 it will notify B that it is worth another try at computing a
8514 location for it, and it will use the location computed for A then.
8515 At the end of the expansion, the tentative NULL locations become
8516 final for all members of PENDING that didn't get a notification.
8517 This function performs this finalization of NULL locations. */
8518
8519 static void
8520 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending)
8521 {
8522 while (!pending->is_empty ())
8523 {
8524 rtx x = pending->pop ();
8525 decl_or_value dv;
8526
8527 if (!VALUE_RECURSED_INTO (x))
8528 continue;
8529
8530 gcc_checking_assert (NO_LOC_P (x));
8531 VALUE_RECURSED_INTO (x) = false;
8532 dv = dv_from_rtx (x);
8533 gcc_checking_assert (dv_changed_p (dv));
8534 set_dv_changed (dv, false);
8535 }
8536 }
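/* Illustrative sketch (not part of the pass): the interplay between
   VALUE_RECURSED_INTO and the PENDING stack amounts to cycle-tolerant
   depth-first expansion of the value dependency graph.  A minimal
   model, with a hypothetical NODE type holding one dependency:

     struct node
     {
       struct node *dep;        // single dependency, for simplicity
       int in_progress;         // analogue of VALUE_RECURSED_INTO
       int has_loc;             // analogue of !NO_LOC_P
     };

     // Return nonzero if N can be given a location.  A node whose
     // dependency chain loops back into a node under expansion gets a
     // tentative "no location" answer, like the NULL returned for the
     // members of the PENDING stack.
     static int
     expand (struct node *n)
     {
       if (n->in_progress)
         return 0;              // recursion cycle: tentatively fail
       if (n->has_loc)
         return 1;
       if (!n->dep)
         return 0;
       n->in_progress = 1;
       n->has_loc = expand (n->dep);
       n->in_progress = 0;
       return n->has_loc;
     }

   The real pass additionally records backlinks, so that when a member
   of a cycle later acquires a location the others are notified and
   re-expanded instead of keeping their tentative NULL.  */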
8537
8538 /* Initialize expand_loc_callback_data D with variable hash table V.
8539 It must be a macro because of alloca (vec stack). */
8540 #define INIT_ELCD(d, v) \
8541 do \
8542 { \
8543 (d).vars = (v); \
8544 (d).depth.complexity = (d).depth.entryvals = 0; \
8545 } \
8546 while (0)
8547 /* Finalize expand_loc_callback_data D, resolved to location L. */
8548 #define FINI_ELCD(d, l) \
8549 do \
8550 { \
8551 resolve_expansions_pending_recursion (&(d).pending); \
8552 (d).pending.release (); \
8553 (d).expanding.release (); \
8554 \
8555 if ((l) && MEM_P (l)) \
8556 (l) = targetm.delegitimize_address (l); \
8557 } \
8558 while (0)
8559
8560 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the
8561 equivalences in VARS, updating their CUR_LOCs in the process. */
8562
8563 static rtx
8564 vt_expand_loc (rtx loc, variable_table_type *vars)
8565 {
8566 struct expand_loc_callback_data data;
8567 rtx result;
8568
8569 if (!MAY_HAVE_DEBUG_INSNS)
8570 return loc;
8571
8572 INIT_ELCD (data, vars);
8573
8574 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH,
8575 vt_expand_loc_callback, &data);
8576
8577 FINI_ELCD (data, result);
8578
8579 return result;
8580 }
8581
8582 /* Expand the one-part VARiable to a location, using the equivalences
8583 in VARS, updating their CUR_LOCs in the process. */
8584
8585 static rtx
8586 vt_expand_1pvar (variable var, variable_table_type *vars)
8587 {
8588 struct expand_loc_callback_data data;
8589 rtx loc;
8590
8591 gcc_checking_assert (var->onepart && var->n_var_parts == 1);
8592
8593 if (!dv_changed_p (var->dv))
8594 return var->var_part[0].cur_loc;
8595
8596 INIT_ELCD (data, vars);
8597
8598 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
8599
8600 gcc_checking_assert (data.expanding.is_empty ());
8601
8602 FINI_ELCD (data, loc);
8603
8604 return loc;
8605 }
8606
8607 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains
8608 additional parameters: WHERE specifies whether the note shall be emitted
8609 before or after instruction INSN. */
8610
8611 int
8612 emit_note_insn_var_location (variable_def **varp, emit_note_data *data)
8613 {
8614 variable var = *varp;
8615 rtx_insn *insn = data->insn;
8616 enum emit_note_where where = data->where;
8617 variable_table_type *vars = data->vars;
8618 rtx_note *note;
8619 rtx note_vl;
8620 int i, j, n_var_parts;
8621 bool complete;
8622 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
8623 HOST_WIDE_INT last_limit;
8624 tree type_size_unit;
8625 HOST_WIDE_INT offsets[MAX_VAR_PARTS];
8626 rtx loc[MAX_VAR_PARTS];
8627 tree decl;
8628 location_chain lc;
8629
8630 gcc_checking_assert (var->onepart == NOT_ONEPART
8631 || var->onepart == ONEPART_VDECL);
8632
8633 decl = dv_as_decl (var->dv);
8634
8635 complete = true;
8636 last_limit = 0;
8637 n_var_parts = 0;
8638 if (!var->onepart)
8639 for (i = 0; i < var->n_var_parts; i++)
8640 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
8641 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
8642 for (i = 0; i < var->n_var_parts; i++)
8643 {
8644 machine_mode mode, wider_mode;
8645 rtx loc2;
8646 HOST_WIDE_INT offset;
8647
8648 if (i == 0 && var->onepart)
8649 {
8650 gcc_checking_assert (var->n_var_parts == 1);
8651 offset = 0;
8652 initialized = VAR_INIT_STATUS_INITIALIZED;
8653 loc2 = vt_expand_1pvar (var, vars);
8654 }
8655 else
8656 {
8657 if (last_limit < VAR_PART_OFFSET (var, i))
8658 {
8659 complete = false;
8660 break;
8661 }
8662 else if (last_limit > VAR_PART_OFFSET (var, i))
8663 continue;
8664 offset = VAR_PART_OFFSET (var, i);
8665 loc2 = var->var_part[i].cur_loc;
8666 if (loc2 && GET_CODE (loc2) == MEM
8667 && GET_CODE (XEXP (loc2, 0)) == VALUE)
8668 {
8669 rtx depval = XEXP (loc2, 0);
8670
8671 loc2 = vt_expand_loc (loc2, vars);
8672
8673 if (loc2)
8674 loc_exp_insert_dep (var, depval, vars);
8675 }
8676 if (!loc2)
8677 {
8678 complete = false;
8679 continue;
8680 }
8681 gcc_checking_assert (GET_CODE (loc2) != VALUE);
8682 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
8683 if (var->var_part[i].cur_loc == lc->loc)
8684 {
8685 initialized = lc->init;
8686 break;
8687 }
8688 gcc_assert (lc);
8689 }
8690
8691 offsets[n_var_parts] = offset;
8692 if (!loc2)
8693 {
8694 complete = false;
8695 continue;
8696 }
8697 loc[n_var_parts] = loc2;
8698 mode = GET_MODE (var->var_part[i].cur_loc);
8699 if (mode == VOIDmode && var->onepart)
8700 mode = DECL_MODE (decl);
8701 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8702
8703 /* Attempt to merge adjacent registers or memory. */
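/* For instance (hypothetical layout): a DImode variable split
   into two SImode parts, one at (mem (plus sp 8)) for offset 0
   and one at (mem (plus sp 12)) for offset 4, can be described
   by a single (mem:DI (plus sp 8)); likewise two consecutive
   hard registers can merge into one wider-mode register
   reference, subject to the endianness checks below. */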
8704 wider_mode = GET_MODE_WIDER_MODE (mode);
8705 for (j = i + 1; j < var->n_var_parts; j++)
8706 if (last_limit <= VAR_PART_OFFSET (var, j))
8707 break;
8708 if (j < var->n_var_parts
8709 && wider_mode != VOIDmode
8710 && var->var_part[j].cur_loc
8711 && mode == GET_MODE (var->var_part[j].cur_loc)
8712 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
8713 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
8714 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars))
8715 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
8716 {
8717 rtx new_loc = NULL;
8718
8719 if (REG_P (loc[n_var_parts])
8720 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2
8721 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode]
8722 && end_hard_regno (mode, REGNO (loc[n_var_parts]))
8723 == REGNO (loc2))
8724 {
8725 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
8726 new_loc = simplify_subreg (wider_mode, loc[n_var_parts],
8727 mode, 0);
8728 else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
8729 new_loc = simplify_subreg (wider_mode, loc2, mode, 0);
8730 if (new_loc)
8731 {
8732 if (!REG_P (new_loc)
8733 || REGNO (new_loc) != REGNO (loc[n_var_parts]))
8734 new_loc = NULL;
8735 else
8736 REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
8737 }
8738 }
8739 else if (MEM_P (loc[n_var_parts])
8740 && GET_CODE (XEXP (loc2, 0)) == PLUS
8741 && REG_P (XEXP (XEXP (loc2, 0), 0))
8742 && CONST_INT_P (XEXP (XEXP (loc2, 0), 1)))
8743 {
8744 if ((REG_P (XEXP (loc[n_var_parts], 0))
8745 && rtx_equal_p (XEXP (loc[n_var_parts], 0),
8746 XEXP (XEXP (loc2, 0), 0))
8747 && INTVAL (XEXP (XEXP (loc2, 0), 1))
8748 == GET_MODE_SIZE (mode))
8749 || (GET_CODE (XEXP (loc[n_var_parts], 0)) == PLUS
8750 && CONST_INT_P (XEXP (XEXP (loc[n_var_parts], 0), 1))
8751 && rtx_equal_p (XEXP (XEXP (loc[n_var_parts], 0), 0),
8752 XEXP (XEXP (loc2, 0), 0))
8753 && INTVAL (XEXP (XEXP (loc[n_var_parts], 0), 1))
8754 + GET_MODE_SIZE (mode)
8755 == INTVAL (XEXP (XEXP (loc2, 0), 1))))
8756 new_loc = adjust_address_nv (loc[n_var_parts],
8757 wider_mode, 0);
8758 }
8759
8760 if (new_loc)
8761 {
8762 loc[n_var_parts] = new_loc;
8763 mode = wider_mode;
8764 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode);
8765 i = j;
8766 }
8767 }
8768 ++n_var_parts;
8769 }
8770 type_size_unit = TYPE_SIZE_UNIT (TREE_TYPE (decl));
8771 if ((unsigned HOST_WIDE_INT) last_limit < TREE_INT_CST_LOW (type_size_unit))
8772 complete = false;
8773
8774 if (! flag_var_tracking_uninit)
8775 initialized = VAR_INIT_STATUS_INITIALIZED;
8776
8777 note_vl = NULL_RTX;
8778 if (!complete)
8779 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
8780 else if (n_var_parts == 1)
8781 {
8782 rtx expr_list;
8783
8784 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
8785 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
8786 else
8787 expr_list = loc[0];
8788
8789 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
8790 }
8791 else if (n_var_parts)
8792 {
8793 rtx parallel;
8794
8795 for (i = 0; i < n_var_parts; i++)
8796 loc[i]
8797 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));
8798
8799 parallel = gen_rtx_PARALLEL (VOIDmode,
8800 gen_rtvec_v (n_var_parts, loc));
8801 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
8802 parallel, initialized);
8803 }
8804
8805 if (where != EMIT_NOTE_BEFORE_INSN)
8806 {
8807 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8808 if (where == EMIT_NOTE_AFTER_CALL_INSN)
8809 NOTE_DURING_CALL_P (note) = true;
8810 }
8811 else
8812 {
8813 /* Make sure that the call related notes come first. */
8814 while (NEXT_INSN (insn)
8815 && NOTE_P (insn)
8816 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8817 && NOTE_DURING_CALL_P (insn))
8818 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8819 insn = NEXT_INSN (insn);
8820 if (NOTE_P (insn)
8821 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
8822 && NOTE_DURING_CALL_P (insn))
8823 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION))
8824 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
8825 else
8826 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
8827 }
8828 NOTE_VAR_LOCATION (note) = note_vl;
8829
8830 set_dv_changed (var->dv, false);
8831 gcc_assert (var->in_changed_variables);
8832 var->in_changed_variables = false;
8833 changed_variables->clear_slot (varp);
8834
8835 /* Continue traversing the hash table. */
8836 return 1;
8837 }
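/* For a complete two-part variable the emitted note carries a
   PARALLEL of (location, offset) pairs, e.g. (hypothetical):

     (note NOTE_INSN_VAR_LOCATION
       (var_location x
         (parallel [(expr_list (reg:SI 3) (const_int 0))
                    (expr_list (mem:SI (plus (reg sp) (const_int 12)))
                               (const_int 4))])))

   A one-part variable gets its location rtx directly, and an
   incomplete variable gets a NULL location, marking it as
   unavailable from that point on.  */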
8838
8839 /* While traversing changed_variables, push onto CHANGED_VALUES_STACK
8840 (a stack of RTX values) entries that aren't user variables. */
8841
8842 int
8843 var_track_values_to_stack (variable_def **slot,
8844 vec<rtx, va_heap> *changed_values_stack)
8845 {
8846 variable var = *slot;
8847
8848 if (var->onepart == ONEPART_VALUE)
8849 changed_values_stack->safe_push (dv_as_value (var->dv));
8850 else if (var->onepart == ONEPART_DEXPR)
8851 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
8852
8853 return 1;
8854 }
8855
8856 /* Remove from changed_variables the entry whose DV corresponds to
8857 value or debug_expr VAL. */
8858 static void
8859 remove_value_from_changed_variables (rtx val)
8860 {
8861 decl_or_value dv = dv_from_rtx (val);
8862 variable_def **slot;
8863 variable var;
8864
8865 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8866 NO_INSERT);
8867 var = *slot;
8868 var->in_changed_variables = false;
8869 changed_variables->clear_slot (slot);
8870 }
8871
8872 /* If VAL (a value or debug_expr) has backlinks to variables actively
8873 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as
8874 changed, adding to CHANGED_VALUES_STACK any dependencies that may
8875 have dependencies of their own to notify. */
8876
8877 static void
8878 notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
8879 vec<rtx, va_heap> *changed_values_stack)
8880 {
8881 variable_def **slot;
8882 variable var;
8883 loc_exp_dep *led;
8884 decl_or_value dv = dv_from_rtx (val);
8885
8886 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv),
8887 NO_INSERT);
8888 if (!slot)
8889 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT);
8890 if (!slot)
8891 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv),
8892 NO_INSERT);
8893 var = *slot;
8894
8895 while ((led = VAR_LOC_DEP_LST (var)))
8896 {
8897 decl_or_value ldv = led->dv;
8898 variable ivar;
8899
8900 /* Deactivate and remove the backlink, as it was "used up". It
8901 makes no sense to attempt to notify the same entity again:
8902 either it will be recomputed and re-register an active
8903 dependency, or it will still have the changed mark. */
8904 if (led->next)
8905 led->next->pprev = led->pprev;
8906 if (led->pprev)
8907 *led->pprev = led->next;
8908 led->next = NULL;
8909 led->pprev = NULL;
8910
8911 if (dv_changed_p (ldv))
8912 continue;
8913
8914 switch (dv_onepart_p (ldv))
8915 {
8916 case ONEPART_VALUE:
8917 case ONEPART_DEXPR:
8918 set_dv_changed (ldv, true);
8919 changed_values_stack->safe_push (dv_as_rtx (ldv));
8920 break;
8921
8922 case ONEPART_VDECL:
8923 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8924 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
8925 variable_was_changed (ivar, NULL);
8926 break;
8927
8928 case NOT_ONEPART:
8929 delete led;
8930 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv));
8931 if (ivar)
8932 {
8933 int i = ivar->n_var_parts;
8934 while (i--)
8935 {
8936 rtx loc = ivar->var_part[i].cur_loc;
8937
8938 if (loc && GET_CODE (loc) == MEM
8939 && XEXP (loc, 0) == val)
8940 {
8941 variable_was_changed (ivar, NULL);
8942 break;
8943 }
8944 }
8945 }
8946 break;
8947
8948 default:
8949 gcc_unreachable ();
8950 }
8951 }
8952 }
8953
8954 /* Take out of changed_variables any entries that don't refer to user
8955 variables. Back-propagate change notifications from values and
8956 debug_exprs to their active dependencies in HTAB or in
8957 CHANGED_VARIABLES. */
8958
8959 static void
8960 process_changed_values (variable_table_type *htab)
8961 {
8962 int i, n;
8963 rtx val;
8964 auto_vec<rtx, 20> changed_values_stack;
8965
8966 /* Move values from changed_variables to changed_values_stack. */
8967 changed_variables
8968 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
8969 (&changed_values_stack);
8970
8971 /* Back-propagate change notifications in values while popping
8972 them from the stack. */
8973 for (n = i = changed_values_stack.length ();
8974 i > 0; i = changed_values_stack.length ())
8975 {
8976 val = changed_values_stack.pop ();
8977 notify_dependents_of_changed_value (val, htab, &changed_values_stack);
8978
8979 /* This condition will hold when visiting each of the entries
8980 originally in changed_variables. We can't remove them
8981 earlier because this could drop the backlinks before we got a
8982 chance to use them. */
8983 if (i == n)
8984 {
8985 remove_value_from_changed_variables (val);
8986 n--;
8987 }
8988 }
8989 }
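/* Illustrative sketch (not part of the pass): the loop above is a
   plain worklist algorithm over the dependency graph.  A minimal
   model, with a hypothetical fixed-size graph where deps[v] lists
   the entities depending on value V:

     #include <stdio.h>

     #define NVALS 4

     // deps[v][*]: indices depending on v, terminated by -1.
     static const int deps[NVALS][NVALS]
       = { { 1, -1 }, { 2, -1 }, { -1 }, { -1 } };

     static void
     propagate (int start)
     {
       int stack[NVALS * NVALS], top = 0;
       int changed[NVALS] = { 0 };     // analogue of dv_changed_p

       stack[top++] = start;
       changed[start] = 1;
       while (top)
         {
           int v = stack[--top];
           printf ("value %d changed\n", v);
           for (const int *d = deps[v]; *d >= 0; d++)
             if (!changed[*d])         // don't push an entity twice
               {
                 changed[*d] = 1;
                 stack[top++] = *d;
               }
         }
     }

   The CHANGED mark plays the role of dv_changed_p; the extra twist in
   process_changed_values is that the entries seeded from
   changed_variables are removed only after their backlinks have been
   consumed.  */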
8990
8991 /* Emit a NOTE_INSN_VAR_LOCATION note for each variable in the chain
8992 CHANGED_VARIABLES and delete this chain. WHERE specifies whether
8993 the notes shall be emitted before or after instruction INSN. */
8994
8995 static void
8996 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
8997 shared_hash vars)
8998 {
8999 emit_note_data data;
9000 variable_table_type *htab = shared_hash_htab (vars);
9001
9002 if (!changed_variables->elements ())
9003 return;
9004
9005 if (MAY_HAVE_DEBUG_INSNS)
9006 process_changed_values (htab);
9007
9008 data.insn = insn;
9009 data.where = where;
9010 data.vars = htab;
9011
9012 changed_variables
9013 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data);
9014 }
9015
9016 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the
9017 same variable in hash table NEW_VARS or is not there at all. */
9018
9019 int
9020 emit_notes_for_differences_1 (variable_def **slot, variable_table_type *new_vars)
9021 {
9022 variable old_var, new_var;
9023
9024 old_var = *slot;
9025 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv));
9026
9027 if (!new_var)
9028 {
9029 /* Variable has disappeared. */
9030 variable empty_var = NULL;
9031
9032 if (old_var->onepart == ONEPART_VALUE
9033 || old_var->onepart == ONEPART_DEXPR)
9034 {
9035 empty_var = variable_from_dropped (old_var->dv, NO_INSERT);
9036 if (empty_var)
9037 {
9038 gcc_checking_assert (!empty_var->in_changed_variables);
9039 if (!VAR_LOC_1PAUX (old_var))
9040 {
9041 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
9042 VAR_LOC_1PAUX (empty_var) = NULL;
9043 }
9044 else
9045 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
9046 }
9047 }
9048
9049 if (!empty_var)
9050 {
9051 empty_var = onepart_pool (old_var->onepart).allocate ();
9052 empty_var->dv = old_var->dv;
9053 empty_var->refcount = 0;
9054 empty_var->n_var_parts = 0;
9055 empty_var->onepart = old_var->onepart;
9056 empty_var->in_changed_variables = false;
9057 }
9058
9059 if (empty_var->onepart)
9060 {
9061 /* Propagate the auxiliary data to (ultimately)
9062 changed_variables. */
9063 empty_var->var_part[0].loc_chain = NULL;
9064 empty_var->var_part[0].cur_loc = NULL;
9065 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
9066 VAR_LOC_1PAUX (old_var) = NULL;
9067 }
9068 variable_was_changed (empty_var, NULL);
9069 /* Continue traversing the hash table. */
9070 return 1;
9071 }
9072 /* Update cur_loc and one-part auxiliary data, before new_var goes
9073 through variable_was_changed. */
9074 if (old_var != new_var && new_var->onepart)
9075 {
9076 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
9077 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
9078 VAR_LOC_1PAUX (old_var) = NULL;
9079 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
9080 }
9081 if (variable_different_p (old_var, new_var))
9082 variable_was_changed (new_var, NULL);
9083
9084 /* Continue traversing the hash table. */
9085 return 1;
9086 }
9087
9088 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash
9089 table OLD_VARS. */
9090
9091 int
9092 emit_notes_for_differences_2 (variable_def **slot, variable_table_type *old_vars)
9093 {
9094 variable old_var, new_var;
9095
9096 new_var = *slot;
9097 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv));
9098 if (!old_var)
9099 {
9100 int i;
9101 for (i = 0; i < new_var->n_var_parts; i++)
9102 new_var->var_part[i].cur_loc = NULL;
9103 variable_was_changed (new_var, NULL);
9104 }
9105
9106 /* Continue traversing the hash table. */
9107 return 1;
9108 }
9109
9110 /* Emit notes before INSN for differences between dataflow sets OLD_SET and
9111 NEW_SET. */
9112
9113 static void
9114 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
9115 dataflow_set *new_set)
9116 {
9117 shared_hash_htab (old_set->vars)
9118 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9119 (shared_hash_htab (new_set->vars));
9120 shared_hash_htab (new_set->vars)
9121 ->traverse <variable_table_type *, emit_notes_for_differences_2>
9122 (shared_hash_htab (old_set->vars));
9123 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars);
9124 }
9125
9126 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */
9127
9128 static rtx_insn *
9129 next_non_note_insn_var_location (rtx_insn *insn)
9130 {
9131 while (insn)
9132 {
9133 insn = NEXT_INSN (insn);
9134 if (insn == 0
9135 || !NOTE_P (insn)
9136 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION)
9137 break;
9138 }
9139
9140 return insn;
9141 }
9142
9143 /* Emit the notes for changes of location parts in the basic block BB. */
9144
9145 static void
9146 emit_notes_in_bb (basic_block bb, dataflow_set *set)
9147 {
9148 unsigned int i;
9149 micro_operation *mo;
9150
9151 dataflow_set_clear (set);
9152 dataflow_set_copy (set, &VTI (bb)->in);
9153
9154 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
9155 {
9156 rtx_insn *insn = mo->insn;
9157 rtx_insn *next_insn = next_non_note_insn_var_location (insn);
9158
9159 switch (mo->type)
9160 {
9161 case MO_CALL:
9162 dataflow_set_clear_at_call (set);
9163 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars);
9164 {
9165 rtx arguments = mo->u.loc, *p = &arguments;
9166 rtx_note *note;
9167 while (*p)
9168 {
9169 XEXP (XEXP (*p, 0), 1)
9170 = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
9171 shared_hash_htab (set->vars));
9172 /* If expansion is successful, keep it in the list. */
9173 if (XEXP (XEXP (*p, 0), 1))
9174 p = &XEXP (*p, 1);
9175 /* Otherwise, if the following item is the data_value for it,
9176 drop it too. */
9177 else if (XEXP (*p, 1)
9178 && REG_P (XEXP (XEXP (*p, 0), 0))
9179 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
9180 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
9181 0))
9182 && REGNO (XEXP (XEXP (*p, 0), 0))
9183 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
9184 0), 0)))
9185 *p = XEXP (XEXP (*p, 1), 1);
9186 /* Just drop this item. */
9187 else
9188 *p = XEXP (*p, 1);
9189 }
9190 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn);
9191 NOTE_VAR_LOCATION (note) = arguments;
9192 }
9193 break;
9194
9195 case MO_USE:
9196 {
9197 rtx loc = mo->u.loc;
9198
9199 if (REG_P (loc))
9200 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9201 else
9202 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL);
9203
9204 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9205 }
9206 break;
9207
9208 case MO_VAL_LOC:
9209 {
9210 rtx loc = mo->u.loc;
9211 rtx val, vloc;
9212 tree var;
9213
9214 if (GET_CODE (loc) == CONCAT)
9215 {
9216 val = XEXP (loc, 0);
9217 vloc = XEXP (loc, 1);
9218 }
9219 else
9220 {
9221 val = NULL_RTX;
9222 vloc = loc;
9223 }
9224
9225 var = PAT_VAR_LOCATION_DECL (vloc);
9226
9227 clobber_variable_part (set, NULL_RTX,
9228 dv_from_decl (var), 0, NULL_RTX);
9229 if (val)
9230 {
9231 if (VAL_NEEDS_RESOLUTION (loc))
9232 val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
9233 set_variable_part (set, val, dv_from_decl (var), 0,
9234 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9235 INSERT);
9236 }
9237 else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
9238 set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
9239 dv_from_decl (var), 0,
9240 VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
9241 INSERT);
9242
9243 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9244 }
9245 break;
9246
9247 case MO_VAL_USE:
9248 {
9249 rtx loc = mo->u.loc;
9250 rtx val, vloc, uloc;
9251
9252 vloc = uloc = XEXP (loc, 1);
9253 val = XEXP (loc, 0);
9254
9255 if (GET_CODE (val) == CONCAT)
9256 {
9257 uloc = XEXP (val, 1);
9258 val = XEXP (val, 0);
9259 }
9260
9261 if (VAL_NEEDS_RESOLUTION (loc))
9262 val_resolve (set, val, vloc, insn);
9263 else
9264 val_store (set, val, uloc, insn, false);
9265
9266 if (VAL_HOLDS_TRACK_EXPR (loc))
9267 {
9268 if (GET_CODE (uloc) == REG)
9269 var_reg_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9270 NULL);
9271 else if (GET_CODE (uloc) == MEM)
9272 var_mem_set (set, uloc, VAR_INIT_STATUS_UNINITIALIZED,
9273 NULL);
9274 }
9275
9276 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars);
9277 }
9278 break;
9279
9280 case MO_VAL_SET:
9281 {
9282 rtx loc = mo->u.loc;
9283 rtx val, vloc, uloc;
9284 rtx dstv, srcv;
9285
9286 vloc = loc;
9287 uloc = XEXP (vloc, 1);
9288 val = XEXP (vloc, 0);
9289 vloc = uloc;
9290
9291 if (GET_CODE (uloc) == SET)
9292 {
9293 dstv = SET_DEST (uloc);
9294 srcv = SET_SRC (uloc);
9295 }
9296 else
9297 {
9298 dstv = uloc;
9299 srcv = NULL;
9300 }
9301
9302 if (GET_CODE (val) == CONCAT)
9303 {
9304 dstv = vloc = XEXP (val, 1);
9305 val = XEXP (val, 0);
9306 }
9307
9308 if (GET_CODE (vloc) == SET)
9309 {
9310 srcv = SET_SRC (vloc);
9311
9312 gcc_assert (val != srcv);
9313 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));
9314
9315 dstv = vloc = SET_DEST (vloc);
9316
9317 if (VAL_NEEDS_RESOLUTION (loc))
9318 val_resolve (set, val, srcv, insn);
9319 }
9320 else if (VAL_NEEDS_RESOLUTION (loc))
9321 {
9322 gcc_assert (GET_CODE (uloc) == SET
9323 && GET_CODE (SET_SRC (uloc)) == REG);
9324 val_resolve (set, val, SET_SRC (uloc), insn);
9325 }
9326
9327 if (VAL_HOLDS_TRACK_EXPR (loc))
9328 {
9329 if (VAL_EXPR_IS_CLOBBERED (loc))
9330 {
9331 if (REG_P (uloc))
9332 var_reg_delete (set, uloc, true);
9333 else if (MEM_P (uloc))
9334 {
9335 gcc_assert (MEM_P (dstv));
9336 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
9337 var_mem_delete (set, dstv, true);
9338 }
9339 }
9340 else
9341 {
9342 bool copied_p = VAL_EXPR_IS_COPIED (loc);
9343 rtx src = NULL, dst = uloc;
9344 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;
9345
9346 if (GET_CODE (uloc) == SET)
9347 {
9348 src = SET_SRC (uloc);
9349 dst = SET_DEST (uloc);
9350 }
9351
9352 if (copied_p)
9353 {
9354 status = find_src_status (set, src);
9355
9356 src = find_src_set_src (set, src);
9357 }
9358
9359 if (REG_P (dst))
9360 var_reg_delete_and_set (set, dst, !copied_p,
9361 status, srcv);
9362 else if (MEM_P (dst))
9363 {
9364 gcc_assert (MEM_P (dstv));
9365 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
9366 var_mem_delete_and_set (set, dstv, !copied_p,
9367 status, srcv);
9368 }
9369 }
9370 }
9371 else if (REG_P (uloc))
9372 var_regno_delete (set, REGNO (uloc));
9373 else if (MEM_P (uloc))
9374 {
9375 gcc_checking_assert (GET_CODE (vloc) == MEM);
9376 gcc_checking_assert (vloc == dstv);
9377 if (vloc != dstv)
9378 clobber_overlapping_mems (set, vloc);
9379 }
9380
9381 val_store (set, val, dstv, insn, true);
9382
9383 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9384 set->vars);
9385 }
9386 break;
9387
9388 case MO_SET:
9389 {
9390 rtx loc = mo->u.loc;
9391 rtx set_src = NULL;
9392
9393 if (GET_CODE (loc) == SET)
9394 {
9395 set_src = SET_SRC (loc);
9396 loc = SET_DEST (loc);
9397 }
9398
9399 if (REG_P (loc))
9400 var_reg_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9401 set_src);
9402 else
9403 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED,
9404 set_src);
9405
9406 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9407 set->vars);
9408 }
9409 break;
9410
9411 case MO_COPY:
9412 {
9413 rtx loc = mo->u.loc;
9414 enum var_init_status src_status;
9415 rtx set_src = NULL;
9416
9417 if (GET_CODE (loc) == SET)
9418 {
9419 set_src = SET_SRC (loc);
9420 loc = SET_DEST (loc);
9421 }
9422
9423 src_status = find_src_status (set, set_src);
9424 set_src = find_src_set_src (set, set_src);
9425
9426 if (REG_P (loc))
9427 var_reg_delete_and_set (set, loc, false, src_status, set_src);
9428 else
9429 var_mem_delete_and_set (set, loc, false, src_status, set_src);
9430
9431 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9432 set->vars);
9433 }
9434 break;
9435
9436 case MO_USE_NO_VAR:
9437 {
9438 rtx loc = mo->u.loc;
9439
9440 if (REG_P (loc))
9441 var_reg_delete (set, loc, false);
9442 else
9443 var_mem_delete (set, loc, false);
9444
9445 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars);
9446 }
9447 break;
9448
9449 case MO_CLOBBER:
9450 {
9451 rtx loc = mo->u.loc;
9452
9453 if (REG_P (loc))
9454 var_reg_delete (set, loc, true);
9455 else
9456 var_mem_delete (set, loc, true);
9457
9458 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN,
9459 set->vars);
9460 }
9461 break;
9462
9463 case MO_ADJUST:
9464 set->stack_adjust += mo->u.adjust;
9465 break;
9466 }
9467 }
9468 }
9469
9470 /* Emit notes for the whole function. */
9471
9472 static void
9473 vt_emit_notes (void)
9474 {
9475 basic_block bb;
9476 dataflow_set cur;
9477
9478 gcc_assert (!changed_variables->elements ());
9479
9480 /* Free memory occupied by the out hash tables, as they aren't used
9481 anymore. */
9482 FOR_EACH_BB_FN (bb, cfun)
9483 dataflow_set_clear (&VTI (bb)->out);
9484
9485 /* Enable emitting notes by functions (mainly by set_variable_part and
9486 delete_variable_part). */
9487 emit_notes = true;
9488
9489 if (MAY_HAVE_DEBUG_INSNS)
9490 {
9491 dropped_values = new variable_table_type (cselib_get_next_uid () * 2);
9492 }
9493
9494 dataflow_set_init (&cur);
9495
9496 FOR_EACH_BB_FN (bb, cfun)
9497 {
9498 /* Emit the notes for changes of variable locations between
9499 consecutive basic blocks. */
9500 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in);
9501
9502 if (MAY_HAVE_DEBUG_INSNS)
9503 local_get_addr_cache = new hash_map<rtx, rtx>;
9504
9505 /* Emit the notes for the changes in the basic block itself. */
9506 emit_notes_in_bb (bb, &cur);
9507
9508 if (MAY_HAVE_DEBUG_INSNS)
9509 delete local_get_addr_cache;
9510 local_get_addr_cache = NULL;
9511
9512 /* Free memory occupied by the in hash table; we won't need it
9513 again. */
9514 dataflow_set_clear (&VTI (bb)->in);
9515 }
9516 #ifdef ENABLE_CHECKING
9517 shared_hash_htab (cur.vars)
9518 ->traverse <variable_table_type *, emit_notes_for_differences_1>
9519 (shared_hash_htab (empty_shared_hash));
9520 #endif
9521 dataflow_set_destroy (&cur);
9522
9523 if (MAY_HAVE_DEBUG_INSNS)
9524 delete dropped_values;
9525 dropped_values = NULL;
9526
9527 emit_notes = false;
9528 }
9529
9530 /* If there is a declaration and offset associated with register/memory RTL,
9531 assign the declaration to *DECLP and the offset to *OFFSETP, and return true. */
9532
9533 static bool
9534 vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
9535 {
9536 if (REG_P (rtl))
9537 {
9538 if (REG_ATTRS (rtl))
9539 {
9540 *declp = REG_EXPR (rtl);
9541 *offsetp = REG_OFFSET (rtl);
9542 return true;
9543 }
9544 }
9545 else if (GET_CODE (rtl) == PARALLEL)
9546 {
9547 tree decl = NULL_TREE;
9548 HOST_WIDE_INT offset = MAX_VAR_PARTS;
9549 int len = XVECLEN (rtl, 0), i;
9550
9551 for (i = 0; i < len; i++)
9552 {
9553 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
9554 if (!REG_P (reg) || !REG_ATTRS (reg))
9555 break;
9556 if (!decl)
9557 decl = REG_EXPR (reg);
9558 if (REG_EXPR (reg) != decl)
9559 break;
9560 if (REG_OFFSET (reg) < offset)
9561 offset = REG_OFFSET (reg);
9562 }
9563
9564 if (i == len)
9565 {
9566 *declp = decl;
9567 *offsetp = offset;
9568 return true;
9569 }
9570 }
9571 else if (MEM_P (rtl))
9572 {
9573 if (MEM_ATTRS (rtl))
9574 {
9575 *declp = MEM_EXPR (rtl);
9576 *offsetp = INT_MEM_OFFSET (rtl);
9577 return true;
9578 }
9579 }
9580 return false;
9581 }
9582
9583 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence
9584 of VAL. */
9585
9586 static void
9587 record_entry_value (cselib_val *val, rtx rtl)
9588 {
9589 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl));
9590
9591 ENTRY_VALUE_EXP (ev) = rtl;
9592
9593 cselib_add_permanent_equiv (val, ev, get_insns ());
9594 }
9595
9596 /* Insert the function parameter PARM into the IN and OUT sets of ENTRY_BLOCK. */
9597
9598 static void
9599 vt_add_function_parameter (tree parm)
9600 {
9601 rtx decl_rtl = DECL_RTL_IF_SET (parm);
9602 rtx incoming = DECL_INCOMING_RTL (parm);
9603 tree decl;
9604 machine_mode mode;
9605 HOST_WIDE_INT offset;
9606 dataflow_set *out;
9607 decl_or_value dv;
9608
9609 if (TREE_CODE (parm) != PARM_DECL)
9610 return;
9611
9612 if (!decl_rtl || !incoming)
9613 return;
9614
9615 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
9616 return;
9617
9618 /* If there is a DRAP register or a pseudo in internal_arg_pointer,
9619 rewrite the incoming location of parameters passed on the stack
9620 into MEMs based on the argument pointer, so that incoming doesn't
9621 depend on a pseudo. */
9622 if (MEM_P (incoming)
9623 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer
9624 || (GET_CODE (XEXP (incoming, 0)) == PLUS
9625 && XEXP (XEXP (incoming, 0), 0)
9626 == crtl->args.internal_arg_pointer
9627 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
9628 {
9629 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
9630 if (GET_CODE (XEXP (incoming, 0)) == PLUS)
9631 off += INTVAL (XEXP (XEXP (incoming, 0), 1));
9632 incoming
9633 = replace_equiv_address_nv (incoming,
9634 plus_constant (Pmode,
9635 arg_pointer_rtx, off));
9636 }
9637
9638 #ifdef HAVE_window_save
9639 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
9640 If the target machine has an explicit window save instruction, the
9641 actual entry value is the corresponding OUTGOING_REGNO instead. */
9642 if (HAVE_window_save && !crtl->uses_only_leaf_regs)
9643 {
9644 if (REG_P (incoming)
9645 && HARD_REGISTER_P (incoming)
9646 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
9647 {
9648 parm_reg_t p;
9649 p.incoming = incoming;
9650 incoming
9651 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
9652 OUTGOING_REGNO (REGNO (incoming)), 0);
9653 p.outgoing = incoming;
9654 vec_safe_push (windowed_parm_regs, p);
9655 }
9656 else if (GET_CODE (incoming) == PARALLEL)
9657 {
9658 rtx outgoing
9659 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
9660 int i;
9661
9662 for (i = 0; i < XVECLEN (incoming, 0); i++)
9663 {
9664 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9665 parm_reg_t p;
9666 p.incoming = reg;
9667 reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
9668 OUTGOING_REGNO (REGNO (reg)), 0);
9669 p.outgoing = reg;
9670 XVECEXP (outgoing, 0, i)
9671 = gen_rtx_EXPR_LIST (VOIDmode, reg,
9672 XEXP (XVECEXP (incoming, 0, i), 1));
9673 vec_safe_push (windowed_parm_regs, p);
9674 }
9675
9676 incoming = outgoing;
9677 }
9678 else if (MEM_P (incoming)
9679 && REG_P (XEXP (incoming, 0))
9680 && HARD_REGISTER_P (XEXP (incoming, 0)))
9681 {
9682 rtx reg = XEXP (incoming, 0);
9683 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
9684 {
9685 parm_reg_t p;
9686 p.incoming = reg;
9687 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
9688 p.outgoing = reg;
9689 vec_safe_push (windowed_parm_regs, p);
9690 incoming = replace_equiv_address_nv (incoming, reg);
9691 }
9692 }
9693 }
9694 #endif
9695
9696 if (!vt_get_decl_and_offset (incoming, &decl, &offset))
9697 {
9698 if (MEM_P (incoming))
9699 {
9700 /* This means the argument is passed by invisible reference. */
9701 offset = 0;
9702 decl = parm;
9703 }
9704 else
9705 {
9706 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset))
9707 return;
9708 offset += byte_lowpart_offset (GET_MODE (incoming),
9709 GET_MODE (decl_rtl));
9710 }
9711 }
9712
9713 if (!decl)
9714 return;
9715
9716 if (parm != decl)
9717 {
9718 /* If that DECL_RTL wasn't a pseudo that got spilled to
9719 memory, bail out. Otherwise, the spill slot sharing code
9720 will force the memory to reference spill_slot_decl (%sfp),
9721 so we don't match above. That's ok, the pseudo must have
9722 referenced the entire parameter, so just reset OFFSET. */
9723 if (decl != get_spill_slot_decl (false))
9724 return;
9725 offset = 0;
9726 }
9727
9728 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset))
9729 return;
9730
9731 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;
9732
9733 dv = dv_from_decl (parm);
9734
9735 if (target_for_debug_bind (parm)
9736 /* We can't deal with these right now, because this kind of
9737 variable is single-part. ??? We could handle parallels
9738 that describe multiple locations for the same single
9739 value, but ATM we don't. */
9740 && GET_CODE (incoming) != PARALLEL)
9741 {
9742 cselib_val *val;
9743 rtx lowpart;
9744
9745 /* ??? We shouldn't ever hit this, but it may happen because
9746 arguments passed by invisible reference aren't dealt with
9747 above: incoming-rtl will have Pmode rather than the
9748 expected mode for the type. */
9749 if (offset)
9750 return;
9751
9752 lowpart = var_lowpart (mode, incoming);
9753 if (!lowpart)
9754 return;
9755
9756 val = cselib_lookup_from_insn (lowpart, mode, true,
9757 VOIDmode, get_insns ());
9758
9759 /* ??? Float-typed values in memory are not handled by
9760 cselib. */
9761 if (val)
9762 {
9763 preserve_value (val);
9764 set_variable_part (out, val->val_rtx, dv, offset,
9765 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9766 dv = dv_from_value (val->val_rtx);
9767 }
9768
9769 if (MEM_P (incoming))
9770 {
9771 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
9772 VOIDmode, get_insns ());
9773 if (val)
9774 {
9775 preserve_value (val);
9776 incoming = replace_equiv_address_nv (incoming, val->val_rtx);
9777 }
9778 }
9779 }
9780
9781 if (REG_P (incoming))
9782 {
9783 incoming = var_lowpart (mode, incoming);
9784 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
9785 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset,
9786 incoming);
9787 set_variable_part (out, incoming, dv, offset,
9788 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9789 if (dv_is_value_p (dv))
9790 {
9791 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming);
9792 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
9793 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
9794 {
9795 machine_mode indmode
9796 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
9797 rtx mem = gen_rtx_MEM (indmode, incoming);
9798 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
9799 VOIDmode,
9800 get_insns ());
9801 if (val)
9802 {
9803 preserve_value (val);
9804 record_entry_value (val, mem);
9805 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0,
9806 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9807 }
9808 }
9809 }
9810 }
9811 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
9812 {
9813 int i;
9814
9815 for (i = 0; i < XVECLEN (incoming, 0); i++)
9816 {
9817 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
9818 offset = REG_OFFSET (reg);
9819 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
9820 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg);
9821 set_variable_part (out, reg, dv, offset,
9822 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9823 }
9824 }
9825 else if (MEM_P (incoming))
9826 {
9827 incoming = var_lowpart (mode, incoming);
9828 set_variable_part (out, incoming, dv, offset,
9829 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT);
9830 }
9831 }
9832
9833 /* Insert the function parameters into the IN and OUT sets of ENTRY_BLOCK. */
9834
9835 static void
9836 vt_add_function_parameters (void)
9837 {
9838 tree parm;
9839
9840 for (parm = DECL_ARGUMENTS (current_function_decl);
9841 parm; parm = DECL_CHAIN (parm))
9842 if (!POINTER_BOUNDS_P (parm))
9843 vt_add_function_parameter (parm);
9844
9845 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl)))
9846 {
9847 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl));
9848
9849 if (TREE_CODE (vexpr) == INDIRECT_REF)
9850 vexpr = TREE_OPERAND (vexpr, 0);
9851
9852 if (TREE_CODE (vexpr) == PARM_DECL
9853 && DECL_ARTIFICIAL (vexpr)
9854 && !DECL_IGNORED_P (vexpr)
9855 && DECL_NAMELESS (vexpr))
9856 vt_add_function_parameter (vexpr);
9857 }
9858 }
9859
9860 /* Initialize cfa_base_rtx, create a preserved VALUE for it and
9861 ensure it isn't flushed during cselib_reset_table.
9862 Can be called only if frame_pointer_rtx or arg_pointer_rtx,
9863 respectively, has been eliminated. */
9864
9865 static void
9866 vt_init_cfa_base (void)
9867 {
9868 cselib_val *val;
9869
9870 #ifdef FRAME_POINTER_CFA_OFFSET
9871 cfa_base_rtx = frame_pointer_rtx;
9872 cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
9873 #else
9874 cfa_base_rtx = arg_pointer_rtx;
9875 cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
9876 #endif
9877 if (cfa_base_rtx == hard_frame_pointer_rtx
9878 || !fixed_regs[REGNO (cfa_base_rtx)])
9879 {
9880 cfa_base_rtx = NULL_RTX;
9881 return;
9882 }
9883 if (!MAY_HAVE_DEBUG_INSNS)
9884 return;
9885
9886 /* Tell alias analysis that cfa_base_rtx should share
9887 find_base_term value with stack pointer or hard frame pointer. */
9888 if (!frame_pointer_needed)
9889 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
9890 else if (!crtl->stack_realign_tried)
9891 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);
9892
9893 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
9894 VOIDmode, get_insns ());
9895 preserve_value (val);
9896 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
9897 }
9898
9899 /* Allocate and initialize the data structures for variable tracking
9900 and parse the RTL to get the micro operations. */
9901
9902 static bool
9903 vt_initialize (void)
9904 {
9905 basic_block bb;
9906 HOST_WIDE_INT fp_cfa_offset = -1;
9907
9908 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def));
9909
9910 empty_shared_hash = new shared_hash_def;
9911 empty_shared_hash->refcount = 1;
9912 empty_shared_hash->htab = new variable_table_type (1);
9913 changed_variables = new variable_table_type (10);
9914
9915 /* Init the IN and OUT sets. */
9916 FOR_ALL_BB_FN (bb, cfun)
9917 {
9918 VTI (bb)->visited = false;
9919 VTI (bb)->flooded = false;
9920 dataflow_set_init (&VTI (bb)->in);
9921 dataflow_set_init (&VTI (bb)->out);
9922 VTI (bb)->permp = NULL;
9923 }
9924
9925 if (MAY_HAVE_DEBUG_INSNS)
9926 {
9927 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
9928 scratch_regs = BITMAP_ALLOC (NULL);
9929 preserved_values.create (256);
9930 global_get_addr_cache = new hash_map<rtx, rtx>;
9931 }
9932 else
9933 {
9934 scratch_regs = NULL;
9935 global_get_addr_cache = NULL;
9936 }
9937
9938 if (MAY_HAVE_DEBUG_INSNS)
9939 {
9940 rtx reg, expr;
9941 int ofst;
9942 cselib_val *val;
9943
9944 #ifdef FRAME_POINTER_CFA_OFFSET
9945 reg = frame_pointer_rtx;
9946 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
9947 #else
9948 reg = arg_pointer_rtx;
9949 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
9950 #endif
9951
9952 ofst -= INCOMING_FRAME_SP_OFFSET;
9953
9954 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
9955 VOIDmode, get_insns ());
9956 preserve_value (val);
9957 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
9958 cselib_preserve_cfa_base_value (val, REGNO (reg));
9959 expr = plus_constant (GET_MODE (stack_pointer_rtx),
9960 stack_pointer_rtx, -ofst);
9961 cselib_add_permanent_equiv (val, expr, get_insns ());
9962
9963 if (ofst)
9964 {
9965 val = cselib_lookup_from_insn (stack_pointer_rtx,
9966 GET_MODE (stack_pointer_rtx), 1,
9967 VOIDmode, get_insns ());
9968 preserve_value (val);
9969 expr = plus_constant (GET_MODE (reg), reg, ofst);
9970 cselib_add_permanent_equiv (val, expr, get_insns ());
9971 }
9972 }
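/* For illustration (the value 8 is hypothetical): if the CFA offset
   computation above yields ofst == 8, the two permanent equivalences
   recorded at function entry are

     VALUE (reg) == (plus stack_pointer_rtx (const_int -8))
     VALUE (stack_pointer_rtx) == (plus reg (const_int 8))

   which lets cselib canonicalize addresses based on either register
   to the same frame base throughout the function.  */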
9973
9974 /* In order to factor out the adjustments made to the stack pointer or to
9975 the hard frame pointer and thus be able to use DW_OP_fbreg operations
9976 instead of individual location lists, we're going to rewrite MEMs based
9977 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
9978 or hard_frame_pointer_rtx to the virtual CFA pointer, which is
9979 frame_pointer_rtx or arg_pointer_rtx depending on the target. We can
9980 do this either when there is no frame pointer in the function and
9981 stack adjustments are consistent for all basic blocks, or when there
9982 is a frame pointer and no stack realignment. But we first have to
9983 check that the virtual CFA pointer has indeed been eliminated. */
9984 if (!frame_pointer_needed)
9985 {
9986 rtx reg, elim;
9987
9988 if (!vt_stack_adjustments ())
9989 return false;
9990
9991 #ifdef FRAME_POINTER_CFA_OFFSET
9992 reg = frame_pointer_rtx;
9993 #else
9994 reg = arg_pointer_rtx;
9995 #endif
9996 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
9997 if (elim != reg)
9998 {
9999 if (GET_CODE (elim) == PLUS)
10000 elim = XEXP (elim, 0);
10001 if (elim == stack_pointer_rtx)
10002 vt_init_cfa_base ();
10003 }
10004 }
10005 else if (!crtl->stack_realign_tried)
10006 {
10007 rtx reg, elim;
10008
10009 #ifdef FRAME_POINTER_CFA_OFFSET
10010 reg = frame_pointer_rtx;
10011 fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
10012 #else
10013 reg = arg_pointer_rtx;
10014 fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
10015 #endif
10016 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10017 if (elim != reg)
10018 {
10019 if (GET_CODE (elim) == PLUS)
10020 {
10021 fp_cfa_offset -= INTVAL (XEXP (elim, 1));
10022 elim = XEXP (elim, 0);
10023 }
10024 if (elim != hard_frame_pointer_rtx)
10025 fp_cfa_offset = -1;
10026 }
10027 else
10028 fp_cfa_offset = -1;
10029 }
10030
10031 /* If the stack is realigned and a DRAP register is used, we're going to
10032 rewrite MEMs based on it that represent incoming locations of
10033 parameters passed on the stack into MEMs based on the argument pointer. Although
10034 we aren't going to rewrite other MEMs, we still need to initialize the
10035 virtual CFA pointer in order to ensure that the argument pointer will
10036 be seen as a constant throughout the function.
10037
10038 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
10039 else if (stack_realign_drap)
10040 {
10041 rtx reg, elim;
10042
10043 #ifdef FRAME_POINTER_CFA_OFFSET
10044 reg = frame_pointer_rtx;
10045 #else
10046 reg = arg_pointer_rtx;
10047 #endif
10048 elim = eliminate_regs (reg, VOIDmode, NULL_RTX);
10049 if (elim != reg)
10050 {
10051 if (GET_CODE (elim) == PLUS)
10052 elim = XEXP (elim, 0);
10053 if (elim == hard_frame_pointer_rtx)
10054 vt_init_cfa_base ();
10055 }
10056 }
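/* For illustration (the constant is hypothetical): in the
   !frame_pointer_needed case above, eliminate_regs might return
   (plus (reg sp) (const_int 16)) for reg; the PLUS is stripped and,
   because the base is the stack pointer, vt_init_cfa_base is called.
   In the frame pointer case, the same constant instead adjusts
   fp_cfa_offset, and fp_cfa_offset is reset to -1 whenever the
   elimination target isn't the hard frame pointer.  */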
10057
10058 hard_frame_pointer_adjustment = -1;
10059
10060 vt_add_function_parameters ();
10061
10062 FOR_EACH_BB_FN (bb, cfun)
10063 {
10064 rtx_insn *insn;
10065 HOST_WIDE_INT pre, post = 0;
10066 basic_block first_bb, last_bb;
10067
10068 if (MAY_HAVE_DEBUG_INSNS)
10069 {
10070 cselib_record_sets_hook = add_with_sets;
10071 if (dump_file && (dump_flags & TDF_DETAILS))
10072 fprintf (dump_file, "first value: %i\n",
10073 cselib_get_next_uid ());
10074 }
10075
10076 first_bb = bb;
10077 for (;;)
10078 {
10079 edge e;
10080 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
10081 || ! single_pred_p (bb->next_bb))
10082 break;
10083 e = find_edge (bb, bb->next_bb);
10084 if (! e || (e->flags & EDGE_FALLTHRU) == 0)
10085 break;
10086 bb = bb->next_bb;
10087 }
10088 last_bb = bb;
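      /* At this point [first_bb, last_bb] is a maximal chain of blocks
	 connected by fallthru edges with single predecessors, so the
	 cselib state accumulated below can safely be carried across the
	 whole chain and is only reset once the chain has been
	 processed.  */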
10089
10090 /* Add the micro-operations to the vector. */
10091 FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
10092 {
10093 HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
10094 VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;
10095 for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
10096 insn = NEXT_INSN (insn))
10097 {
10098 if (INSN_P (insn))
10099 {
10100 if (!frame_pointer_needed)
10101 {
10102 insn_stack_adjust_offset_pre_post (insn, &pre, &post);
10103 if (pre)
10104 {
10105 micro_operation mo;
10106 mo.type = MO_ADJUST;
10107 mo.u.adjust = pre;
10108 mo.insn = insn;
10109 if (dump_file && (dump_flags & TDF_DETAILS))
10110 log_op_type (PATTERN (insn), bb, insn,
10111 MO_ADJUST, dump_file);
10112 VTI (bb)->mos.safe_push (mo);
10113 VTI (bb)->out.stack_adjust += pre;
10114 }
10115 }
10116
10117 cselib_hook_called = false;
10118 adjust_insn (bb, insn);
10119 if (MAY_HAVE_DEBUG_INSNS)
10120 {
10121 if (CALL_P (insn))
10122 prepare_call_arguments (bb, insn);
10123 cselib_process_insn (insn);
10124 if (dump_file && (dump_flags & TDF_DETAILS))
10125 {
10126 print_rtl_single (dump_file, insn);
10127 dump_cselib_table (dump_file);
10128 }
10129 }
10130 if (!cselib_hook_called)
10131 add_with_sets (insn, 0, 0);
10132 cancel_changes (0);
10133
10134 if (!frame_pointer_needed && post)
10135 {
10136 micro_operation mo;
10137 mo.type = MO_ADJUST;
10138 mo.u.adjust = post;
10139 mo.insn = insn;
10140 if (dump_file && (dump_flags & TDF_DETAILS))
10141 log_op_type (PATTERN (insn), bb, insn,
10142 MO_ADJUST, dump_file);
10143 VTI (bb)->mos.safe_push (mo);
10144 VTI (bb)->out.stack_adjust += post;
10145 }
10146
10147 if (fp_cfa_offset != -1
10148 && hard_frame_pointer_adjustment == -1
10149 && fp_setter_insn (insn))
10150 {
10151 vt_init_cfa_base ();
10152 hard_frame_pointer_adjustment = fp_cfa_offset;
10153 /* Disassociate sp from fp now. */
10154 if (MAY_HAVE_DEBUG_INSNS)
10155 {
10156 cselib_val *v;
10157 cselib_invalidate_rtx (stack_pointer_rtx);
10158 v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
10159 VOIDmode);
10160 if (v && !cselib_preserved_value_p (v))
10161 {
10162 cselib_set_value_sp_based (v);
10163 preserve_value (v);
10164 }
10165 }
10166 }
10167 }
10168 }
10169 gcc_assert (offset == VTI (bb)->out.stack_adjust);
10170 }
10171
10172 bb = last_bb;
10173
10174 if (MAY_HAVE_DEBUG_INSNS)
10175 {
10176 cselib_preserve_only_values ();
10177 cselib_reset_table (cselib_get_next_uid ());
10178 cselib_record_sets_hook = NULL;
10179 }
10180 }
10181
10182 hard_frame_pointer_adjustment = -1;
10183 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
10184 cfa_base_rtx = NULL_RTX;
10185 return true;
10186 }
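/* For illustration (insn and target details hypothetical): when the
   scan in vt_initialize meets a push such as

     (set (mem:DI (pre_dec:DI (reg sp))) (reg:DI ax))

   in a function with no frame pointer, insn_stack_adjust_offset_pre_post
   reports a pre-modification of -8, an MO_ADJUST micro operation with
   u.adjust == -8 is pushed onto VTI (bb)->mos, and
   VTI (bb)->out.stack_adjust is updated by the same amount.  */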
10187
10188 /* This is *not* reset after each function. It gives each
10189 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
10190 a unique label number. */
10191
10192 static int debug_label_num = 1;
10193
10194 /* Get rid of all debug insns from the insn stream. */
10195
10196 static void
10197 delete_debug_insns (void)
10198 {
10199 basic_block bb;
10200 rtx_insn *insn, *next;
10201
10202 if (!MAY_HAVE_DEBUG_INSNS)
10203 return;
10204
10205 FOR_EACH_BB_FN (bb, cfun)
10206 {
10207 FOR_BB_INSNS_SAFE (bb, insn, next)
10208 if (DEBUG_INSN_P (insn))
10209 {
10210 tree decl = INSN_VAR_LOCATION_DECL (insn);
10211 if (TREE_CODE (decl) == LABEL_DECL
10212 && DECL_NAME (decl)
10213 && !DECL_RTL_SET_P (decl))
10214 {
10215 PUT_CODE (insn, NOTE);
10216 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL;
10217 NOTE_DELETED_LABEL_NAME (insn)
10218 = IDENTIFIER_POINTER (DECL_NAME (decl));
10219 SET_DECL_RTL (decl, insn);
10220 CODE_LABEL_NUMBER (insn) = debug_label_num++;
10221 }
10222 else
10223 delete_insn (insn);
10224 }
10225 }
10226 }
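/* For illustration: a debug bind for a named label L whose DECL_RTL
   was never set, conceptually

     (debug_insn (var_location L (nil)))

   is not deleted above but converted in place into

     (note NOTE_INSN_DELETED_DEBUG_LABEL "L")

   with a fresh CODE_LABEL_NUMBER, so later debug output can still
   refer to the position the label would have had.  */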
10227
10228 /* Run a fast, BB-local-only version of var tracking, to take care of
10229 information that we don't do global analysis on, so that not all
10230 of that information is lost. If SKIPPED holds, we're skipping the
10231 global pass entirely, so we should try to use information it would
10232 have handled as well. */
10233
10234 static void
10235 vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED)
10236 {
10237 /* ??? Just skip it all for now. */
10238 delete_debug_insns ();
10239 }
10240
10241 /* Free the data structures needed for variable tracking. */
10242
10243 static void
10244 vt_finalize (void)
10245 {
10246 basic_block bb;
10247
10248 FOR_EACH_BB_FN (bb, cfun)
10249 {
10250 VTI (bb)->mos.release ();
10251 }
10252
10253 FOR_ALL_BB_FN (bb, cfun)
10254 {
10255 dataflow_set_destroy (&VTI (bb)->in);
10256 dataflow_set_destroy (&VTI (bb)->out);
10257 if (VTI (bb)->permp)
10258 {
10259 dataflow_set_destroy (VTI (bb)->permp);
10260 XDELETE (VTI (bb)->permp);
10261 }
10262 }
10263 free_aux_for_blocks ();
10264 delete empty_shared_hash->htab;
10265 empty_shared_hash->htab = NULL;
10266 delete changed_variables;
10267 changed_variables = NULL;
10268 attrs_def::pool.release ();
10269 var_pool.release ();
10270 location_chain_def::pool.release ();
10271 shared_hash_def::pool.release ();
10272
10273 if (MAY_HAVE_DEBUG_INSNS)
10274 {
10275 if (global_get_addr_cache)
10276 delete global_get_addr_cache;
10277 global_get_addr_cache = NULL;
10278 loc_exp_dep::pool.release ();
10279 valvar_pool.release ();
10280 preserved_values.release ();
10281 cselib_finish ();
10282 BITMAP_FREE (scratch_regs);
10283 scratch_regs = NULL;
10284 }
10285
10286 #ifdef HAVE_window_save
10287 vec_free (windowed_parm_regs);
10288 #endif
10289
10290 if (vui_vec)
10291 XDELETEVEC (vui_vec);
10292 vui_vec = NULL;
10293 vui_allocated = 0;
10294 }
10295
10296 /* The entry point to the variable tracking pass. */
10297
10298 static inline unsigned int
10299 variable_tracking_main_1 (void)
10300 {
10301 bool success;
10302
10303 if (flag_var_tracking_assignments < 0
10304 /* Var-tracking right now assumes the IR doesn't contain
10305 any pseudos at this point. */
10306 || targetm.no_register_allocation)
10307 {
10308 delete_debug_insns ();
10309 return 0;
10310 }
10311
10312 if (n_basic_blocks_for_fn (cfun) > 500
10313 && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20)
10314 {
10315 vt_debug_insns_local (true);
10316 return 0;
10317 }
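  /* E.g. a function with 600 basic blocks and 12000 edges (20 edges
     per block on average) trips the cutoff above: the global dataflow
     analysis is skipped as too expensive and only the BB-local
     fallback in vt_debug_insns_local runs.  */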
10318
10319 mark_dfs_back_edges ();
10320 if (!vt_initialize ())
10321 {
10322 vt_finalize ();
10323 vt_debug_insns_local (true);
10324 return 0;
10325 }
10326
10327 success = vt_find_locations ();
10328
10329 if (!success && flag_var_tracking_assignments > 0)
10330 {
10331 vt_finalize ();
10332
10333 delete_debug_insns ();
10334
10335 /* This is later restored by our caller. */
10336 flag_var_tracking_assignments = 0;
10337
10338 success = vt_initialize ();
10339 gcc_assert (success);
10340
10341 success = vt_find_locations ();
10342 }
10343
10344 if (!success)
10345 {
10346 vt_finalize ();
10347 vt_debug_insns_local (false);
10348 return 0;
10349 }
10350
10351 if (dump_file && (dump_flags & TDF_DETAILS))
10352 {
10353 dump_dataflow_sets ();
10354 dump_reg_info (dump_file);
10355 dump_flow_info (dump_file, dump_flags);
10356 }
10357
10358 timevar_push (TV_VAR_TRACKING_EMIT);
10359 vt_emit_notes ();
10360 timevar_pop (TV_VAR_TRACKING_EMIT);
10361
10362 vt_finalize ();
10363 vt_debug_insns_local (false);
10364 return 0;
10365 }
10366
10367 unsigned int
10368 variable_tracking_main (void)
10369 {
10370 unsigned int ret;
10371 int save = flag_var_tracking_assignments;
10372
10373 ret = variable_tracking_main_1 ();
10374
10375 flag_var_tracking_assignments = save;
10376
10377 return ret;
10378 }
10379 \f
10380 namespace {
10381
10382 const pass_data pass_data_variable_tracking =
10383 {
10384 RTL_PASS, /* type */
10385 "vartrack", /* name */
10386 OPTGROUP_NONE, /* optinfo_flags */
10387 TV_VAR_TRACKING, /* tv_id */
10388 0, /* properties_required */
10389 0, /* properties_provided */
10390 0, /* properties_destroyed */
10391 0, /* todo_flags_start */
10392 0, /* todo_flags_finish */
10393 };
10394
10395 class pass_variable_tracking : public rtl_opt_pass
10396 {
10397 public:
10398 pass_variable_tracking (gcc::context *ctxt)
10399 : rtl_opt_pass (pass_data_variable_tracking, ctxt)
10400 {}
10401
10402 /* opt_pass methods: */
10403 virtual bool gate (function *)
10404 {
10405 return (flag_var_tracking && !targetm.delay_vartrack);
10406 }
10407
10408 virtual unsigned int execute (function *)
10409 {
10410 return variable_tracking_main ();
10411 }
10412
10413 }; // class pass_variable_tracking
10414
10415 } // anon namespace
10416
10417 rtl_opt_pass *
10418 make_pass_variable_tracking (gcc::context *ctxt)
10419 {
10420 return new pass_variable_tracking (ctxt);
10421 }
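/* The object returned here is what the pass manager instantiates when
   the pass is listed in passes.def; the gate above keeps the pass from
   running when the target prefers delayed var-tracking via
   targetm.delay_vartrack.  */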