1 /* Inline functions for tree-flow.h
2 Copyright (C) 2001, 2003, 2005, 2006, 2007, 2008, 2010
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #ifndef _TREE_FLOW_INLINE_H
23 #define _TREE_FLOW_INLINE_H 1
24
25 /* Inline functions for manipulating various data structures defined in
26 tree-flow.h. See tree-flow.h for documentation. */
27
28 /* Return true when gimple SSA form was built.
29 gimple_in_ssa_p is queried by the gimplifier in various early stages before
30 the SSA infrastructure is initialized, so check for the presence of the
31 data structures first. */
32 static inline bool
33 gimple_in_ssa_p (const struct function *fun)
34 {
35 return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
36 }
37
38 /* Hash table of all variables referenced in the function. */
39 static inline htab_t
40 gimple_referenced_vars (const struct function *fun)
41 {
42 if (!fun->gimple_df)
43 return NULL;
44 return fun->gimple_df->referenced_vars;
45 }
46
47 /* Artificial variable used for the virtual operand FUD chain. */
48 static inline tree
49 gimple_vop (const struct function *fun)
50 {
51 gcc_checking_assert (fun && fun->gimple_df);
52 return fun->gimple_df->vop;
53 }
54
55 /* Initialize the hashtable iterator HTI to point to hashtable TABLE and return its first element, or NULL if TABLE has none. */
56
57 static inline void *
58 first_htab_element (htab_iterator *hti, htab_t table)
59 {
60 hti->htab = table;
61 hti->slot = table->entries;
62 hti->limit = hti->slot + htab_size (table);
63 do
64 {
65 PTR x = *(hti->slot);
66 if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
67 break;
68 } while (++(hti->slot) < hti->limit);
69
70 if (hti->slot < hti->limit)
71 return *(hti->slot);
72 return NULL;
73 }
74
75 /* Return true if the hashtable iterator HTI has reached the end of the
76 hashtable. */
77
78 static inline bool
79 end_htab_p (const htab_iterator *hti)
80 {
81 if (hti->slot >= hti->limit)
82 return true;
83 return false;
84 }
85
86 /* Advance the hashtable iterator pointed to by HTI to the next element of the
87 hashtable. */
88
89 static inline void *
90 next_htab_element (htab_iterator *hti)
91 {
92 while (++(hti->slot) < hti->limit)
93 {
94 PTR x = *(hti->slot);
95 if (x != HTAB_EMPTY_ENTRY && x != HTAB_DELETED_ENTRY)
96 return x;
97 };
98 return NULL;
99 }
100
101 /* Initialize ITER to point to the first referenced variable in the
102 referenced_vars hashtable, and return that variable. */
103
104 static inline tree
105 first_referenced_var (referenced_var_iterator *iter)
106 {
107 return (tree) first_htab_element (&iter->hti,
108 gimple_referenced_vars (cfun));
109 }
110
111 /* Return true if we have hit the end of the referenced variables ITER is
112 iterating through. */
113
114 static inline bool
115 end_referenced_vars_p (const referenced_var_iterator *iter)
116 {
117 return end_htab_p (&iter->hti);
118 }
119
120 /* Make ITER point to the next referenced_var in the referenced_var hashtable,
121 and return that variable. */
122
123 static inline tree
124 next_referenced_var (referenced_var_iterator *iter)
125 {
126 return (tree) next_htab_element (&iter->hti);
127 }
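
/* Usage sketch (illustrative only, not part of this header): walking all
   referenced variables with the iterator trio above.

     referenced_var_iterator rvi;
     tree var;

     for (var = first_referenced_var (&rvi);
          !end_referenced_vars_p (&rvi);
          var = next_referenced_var (&rvi))
       print_generic_expr (stderr, var, 0);

   print_generic_expr merely stands in for whatever per-variable work the
   caller needs; the FOR_EACH_REFERENCED_VAR macro wraps this same
   pattern.  */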
128
129 /* Return the variable annotation for T, which must be a _DECL node.
130 Return NULL if the variable annotation doesn't already exist. */
131 static inline var_ann_t
132 var_ann (const_tree t)
133 {
134 const var_ann_t *p = DECL_VAR_ANN_PTR (t);
135 return p ? *p : NULL;
136 }
137
138 /* Return the variable annotation for T, which must be a _DECL node.
139 Create the variable annotation if it doesn't exist. */
140 static inline var_ann_t
141 get_var_ann (tree var)
142 {
143 var_ann_t *p = DECL_VAR_ANN_PTR (var);
144 gcc_checking_assert (p);
145 return *p ? *p : create_var_ann (var);
146 }
147
148 /* Get the number of the next statement uid to be allocated. */
149 static inline unsigned int
150 gimple_stmt_max_uid (struct function *fn)
151 {
152 return fn->last_stmt_uid;
153 }
154
155 /* Set the number of the next statement uid to be allocated. */
156 static inline void
157 set_gimple_stmt_max_uid (struct function *fn, unsigned int maxid)
158 {
159 fn->last_stmt_uid = maxid;
160 }
161
162 /* Allocate and return a new statement uid, bumping the counter. */
163 static inline unsigned int
164 inc_gimple_stmt_max_uid (struct function *fn)
165 {
166 return fn->last_stmt_uid++;
167 }
168
169 /* Return the line number for statement STMT, or -1 if we have no line
170 number information for it. */
171 static inline int
172 get_lineno (const_gimple stmt)
173 {
174 location_t loc;
175
176 if (!stmt)
177 return -1;
178
179 loc = gimple_location (stmt);
180 if (loc == UNKNOWN_LOCATION)
181 return -1;
182
183 return LOCATION_LINE (loc);
184 }
185
186 /* Delink an immediate_uses node from its chain. */
187 static inline void
188 delink_imm_use (ssa_use_operand_t *linknode)
189 {
190 /* Return if this node is not in a list. */
191 if (linknode->prev == NULL)
192 return;
193
194 linknode->prev->next = linknode->next;
195 linknode->next->prev = linknode->prev;
196 linknode->prev = NULL;
197 linknode->next = NULL;
198 }
199
200 /* Link ssa_imm_use node LINKNODE into the chain for LIST. */
201 static inline void
202 link_imm_use_to_list (ssa_use_operand_t *linknode, ssa_use_operand_t *list)
203 {
204 /* Link the new node at the head of the list. If we are in the process of
205 traversing the list, we won't visit any new nodes added to it. */
206 linknode->prev = list;
207 linknode->next = list->next;
208 list->next->prev = linknode;
209 list->next = linknode;
210 }
211
212 /* Link ssa_imm_use node LINKNODE into the chain for DEF. */
213 static inline void
214 link_imm_use (ssa_use_operand_t *linknode, tree def)
215 {
216 ssa_use_operand_t *root;
217
218 if (!def || TREE_CODE (def) != SSA_NAME)
219 linknode->prev = NULL;
220 else
221 {
222 root = &(SSA_NAME_IMM_USE_NODE (def));
223 #ifdef ENABLE_CHECKING
224 if (linknode->use)
225 gcc_checking_assert (*(linknode->use) == def);
226 #endif
227 link_imm_use_to_list (linknode, root);
228 }
229 }
230
231 /* Set the value of a use pointed to by USE to VAL. */
232 static inline void
233 set_ssa_use_from_ptr (use_operand_p use, tree val)
234 {
235 delink_imm_use (use);
236 *(use->use) = val;
237 link_imm_use (use, val);
238 }
239
240 /* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring
241 in STMT. */
242 static inline void
243 link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
244 {
245 if (stmt)
246 link_imm_use (linknode, def);
247 else
248 link_imm_use (linknode, NULL);
249 linknode->loc.stmt = stmt;
250 }
251
252 /* Relink a new node in place of an old node in the list. */
253 static inline void
254 relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
255 {
256 /* The new node and the old one had better refer to the same value. */
257 gcc_checking_assert (*(old->use) == *(node->use));
258 node->prev = old->prev;
259 node->next = old->next;
260 if (old->prev)
261 {
262 old->prev->next = node;
263 old->next->prev = node;
264 /* Remove the old node from the list. */
265 old->prev = NULL;
266 }
267 }
268
269 /* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring
270 in STMT. */
271 static inline void
272 relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
273 gimple stmt)
274 {
275 if (stmt)
276 relink_imm_use (linknode, old);
277 else
278 link_imm_use (linknode, NULL);
279 linknode->loc.stmt = stmt;
280 }
281
282
283 /* Return true if IMM has reached the end of the immediate use list. */
284 static inline bool
285 end_readonly_imm_use_p (const imm_use_iterator *imm)
286 {
287 return (imm->imm_use == imm->end_p);
288 }
289
290 /* Initialize iterator IMM to process the list for VAR. */
291 static inline use_operand_p
292 first_readonly_imm_use (imm_use_iterator *imm, tree var)
293 {
294 imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
295 imm->imm_use = imm->end_p->next;
296 #ifdef ENABLE_CHECKING
297 imm->iter_node.next = imm->imm_use->next;
298 #endif
299 if (end_readonly_imm_use_p (imm))
300 return NULL_USE_OPERAND_P;
301 return imm->imm_use;
302 }
303
304 /* Bump IMM to the next use in the list. */
305 static inline use_operand_p
306 next_readonly_imm_use (imm_use_iterator *imm)
307 {
308 use_operand_p old = imm->imm_use;
309
310 #ifdef ENABLE_CHECKING
311 /* If this assertion fails, it indicates the 'next' pointer has changed
312 since the last bump. This indicates that the list is being modified
313 via stmt changes, or SET_USE, or somesuch thing, and you need to be
314 using the SAFE version of the iterator. */
315 gcc_assert (imm->iter_node.next == old->next);
316 imm->iter_node.next = old->next->next;
317 #endif
318
319 imm->imm_use = old->next;
320 if (end_readonly_imm_use_p (imm))
321 return NULL_USE_OPERAND_P;
322 return imm->imm_use;
323 }
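
/* Usage sketch (illustrative only): a read-only walk over the immediate
   uses of an SSA_NAME, valid as long as no statements are changed during
   the walk.

     imm_use_iterator iter;
     use_operand_p use_p;
     unsigned n = 0;

     for (use_p = first_readonly_imm_use (&iter, ssa_name);
          use_p != NULL_USE_OPERAND_P;
          use_p = next_readonly_imm_use (&iter))
       n++;

   ssa_name stands for some SSA_NAME of interest; this is essentially what
   the FOR_EACH_IMM_USE_FAST macro expands to.  */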
324
325 /* tree-cfg.c */
326 extern bool has_zero_uses_1 (const ssa_use_operand_t *head);
327 extern bool single_imm_use_1 (const ssa_use_operand_t *head,
328 use_operand_p *use_p, gimple *stmt);
329
330 /* Return true if VAR has no nondebug uses. */
331 static inline bool
332 has_zero_uses (const_tree var)
333 {
334 const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
335
336 /* A single use_operand means there are no items in the list. */
337 if (ptr == ptr->next)
338 return true;
339
340 /* If there are debug stmts, we have to look at each use and see
341 whether there are any nondebug uses. */
342 if (!MAY_HAVE_DEBUG_STMTS)
343 return false;
344
345 return has_zero_uses_1 (ptr);
346 }
347
348 /* Return true if VAR has a single nondebug use. */
349 static inline bool
350 has_single_use (const_tree var)
351 {
352 const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
353
354 /* If there aren't any uses whatsoever, we're done. */
355 if (ptr == ptr->next)
356 return false;
357
358 /* If there's a single use, check that it's not a debug stmt. */
359 if (ptr == ptr->next->next)
360 return !is_gimple_debug (USE_STMT (ptr->next));
361
362 /* If there are debug stmts, we have to look at each of them. */
363 if (!MAY_HAVE_DEBUG_STMTS)
364 return false;
365
366 return single_imm_use_1 (ptr, NULL, NULL);
367 }
368
369
370 /* If VAR has only a single immediate nondebug use, return true, and
371 set USE_P and STMT to the use pointer and stmt of occurrence. */
372 static inline bool
373 single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
374 {
375 const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
376
377 /* If there aren't any uses whatsoever, we're done. */
378 if (ptr == ptr->next)
379 {
380 return_false:
381 *use_p = NULL_USE_OPERAND_P;
382 *stmt = NULL;
383 return false;
384 }
385
386 /* If there's a single use, check that it's not a debug stmt. */
387 if (ptr == ptr->next->next)
388 {
389 if (!is_gimple_debug (USE_STMT (ptr->next)))
390 {
391 *use_p = ptr->next;
392 *stmt = ptr->next->loc.stmt;
393 return true;
394 }
395 else
396 goto return_false;
397 }
398
399 /* If there are debug stmts, we have to look at each of them. */
400 if (!MAY_HAVE_DEBUG_STMTS)
401 goto return_false;
402
403 return single_imm_use_1 (ptr, use_p, stmt);
404 }
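
/* Usage sketch (illustrative only): replacing the single nondebug use of
   NAME with VAL, assuming such a use exists.  SET_USE and update_stmt are
   the usual helpers from the SSA operand infrastructure.

     use_operand_p use_p;
     gimple use_stmt;

     if (single_imm_use (name, &use_p, &use_stmt))
       {
         SET_USE (use_p, val);
         update_stmt (use_stmt);
       }
   */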
405
406 /* Return the number of nondebug immediate uses of VAR. */
407 static inline unsigned int
408 num_imm_uses (const_tree var)
409 {
410 const ssa_use_operand_t *const start = &(SSA_NAME_IMM_USE_NODE (var));
411 const ssa_use_operand_t *ptr;
412 unsigned int num = 0;
413
414 if (!MAY_HAVE_DEBUG_STMTS)
415 for (ptr = start->next; ptr != start; ptr = ptr->next)
416 num++;
417 else
418 for (ptr = start->next; ptr != start; ptr = ptr->next)
419 if (!is_gimple_debug (USE_STMT (ptr)))
420 num++;
421
422 return num;
423 }
424
425 /* Return the tree pointed-to by USE. */
426 static inline tree
427 get_use_from_ptr (use_operand_p use)
428 {
429 return *(use->use);
430 }
431
432 /* Return the tree pointed-to by DEF. */
433 static inline tree
434 get_def_from_ptr (def_operand_p def)
435 {
436 return *def;
437 }
438
439 /* Return a use_operand_p pointer for argument I of PHI node GS. */
440
441 static inline use_operand_p
442 gimple_phi_arg_imm_use_ptr (gimple gs, int i)
443 {
444 return &gimple_phi_arg (gs, i)->imm_use;
445 }
446
447 /* Return the tree operand for argument I of PHI node GS. */
448
449 static inline tree
450 gimple_phi_arg_def (gimple gs, size_t index)
451 {
452 struct phi_arg_d *pd = gimple_phi_arg (gs, index);
453 return get_use_from_ptr (&pd->imm_use);
454 }
455
456 /* Return a pointer to the tree operand for argument I of PHI node GS. */
457
458 static inline tree *
459 gimple_phi_arg_def_ptr (gimple gs, size_t index)
460 {
461 return &gimple_phi_arg (gs, index)->def;
462 }
463
464 /* Return the edge associated with argument I of phi node GS. */
465
466 static inline edge
467 gimple_phi_arg_edge (gimple gs, size_t i)
468 {
469 return EDGE_PRED (gimple_bb (gs), i);
470 }
471
472 /* Return the source location of gimple argument I of phi node GS. */
473
474 static inline source_location
475 gimple_phi_arg_location (gimple gs, size_t i)
476 {
477 return gimple_phi_arg (gs, i)->locus;
478 }
479
480 /* Return the source location of the argument on edge E of phi node GS. */
481
482 static inline source_location
483 gimple_phi_arg_location_from_edge (gimple gs, edge e)
484 {
485 return gimple_phi_arg (gs, e->dest_idx)->locus;
486 }
487
488 /* Set the source location of gimple argument I of phi node GS to LOC. */
489
490 static inline void
491 gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
492 {
493 gimple_phi_arg (gs, i)->locus = loc;
494 }
495
496 /* Return TRUE if argument I of phi node GS has a location record. */
497
498 static inline bool
499 gimple_phi_arg_has_location (gimple gs, size_t i)
500 {
501 return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
502 }
503
504
505 /* Return the PHI nodes for basic block BB, or NULL if there are no
506 PHI nodes. */
507 static inline gimple_seq
508 phi_nodes (const_basic_block bb)
509 {
510 gcc_checking_assert (!(bb->flags & BB_RTL));
511 if (!bb->il.gimple)
512 return NULL;
513 return bb->il.gimple->phi_nodes;
514 }
515
516 /* Set PHI nodes of a basic block BB to SEQ. */
517
518 static inline void
519 set_phi_nodes (basic_block bb, gimple_seq seq)
520 {
521 gimple_stmt_iterator i;
522
523 gcc_checking_assert (!(bb->flags & BB_RTL));
524 bb->il.gimple->phi_nodes = seq;
525 if (seq)
526 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
527 gimple_set_bb (gsi_stmt (i), bb);
528 }
529
530 /* Return the index of the PHI argument containing the specified use USE. */
531
532 static inline int
533 phi_arg_index_from_use (use_operand_p use)
534 {
535 struct phi_arg_d *element, *root;
536 size_t index;
537 gimple phi;
538
539 /* Since the use is the first thing in a PHI argument element, we can
540 calculate its index based on casting it to an argument, and performing
541 pointer arithmetic. */
542
543 phi = USE_STMT (use);
544
545 element = (struct phi_arg_d *)use;
546 root = gimple_phi_arg (phi, 0);
547 index = element - root;
548
549 #ifdef ENABLE_CHECKING
550 /* Make sure the calculation doesn't have any leftover bytes. If it does,
551 then imm_use is likely not the first element in phi_arg_d. */
552 gcc_assert ((((char *)element - (char *)root)
553 % sizeof (struct phi_arg_d)) == 0
554 && index < gimple_phi_capacity (phi));
555 #endif
556
557 return index;
558 }
559
560 /* Mark VAR as used, so that it'll be preserved during rtl expansion. */
561
562 static inline void
563 set_is_used (tree var)
564 {
565 var_ann_t ann = get_var_ann (var);
566 ann->used = 1;
567 }
568
569
570 /* Return true if T (assumed to be a DECL) is a global variable.
571 A variable is considered global if its storage is not automatic. */
572
573 static inline bool
574 is_global_var (const_tree t)
575 {
576 return (TREE_STATIC (t) || DECL_EXTERNAL (t));
577 }
578
579
580 /* Return true if VAR may be aliased. A variable is considered aliased
581 if its address is taken in the local TU, or possibly in another TU,
582 and it might be modified through a pointer. */
583
584 static inline bool
585 may_be_aliased (const_tree var)
586 {
587 return (TREE_CODE (var) != CONST_DECL
588 && !((TREE_STATIC (var) || TREE_PUBLIC (var) || DECL_EXTERNAL (var))
589 && TREE_READONLY (var)
590 && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (var)))
591 && (TREE_PUBLIC (var)
592 || DECL_EXTERNAL (var)
593 || TREE_ADDRESSABLE (var)));
594 }
595
596
597 /* PHI nodes should contain only ssa_names and invariants. A test
598 for ssa_name is definitely simpler; don't let invalid contents
599 slip in in the meantime. */
600
601 static inline bool
602 phi_ssa_name_p (const_tree t)
603 {
604 if (TREE_CODE (t) == SSA_NAME)
605 return true;
606 #ifdef ENABLE_CHECKING
607 gcc_assert (is_gimple_min_invariant (t));
608 #endif
609 return false;
610 }
611
612
613 /* Return the loop containing the statement STMT. */
614
615 static inline struct loop *
616 loop_containing_stmt (gimple stmt)
617 {
618 basic_block bb = gimple_bb (stmt);
619 if (!bb)
620 return NULL;
621
622 return bb->loop_father;
623 }
624
625
626 /* ----------------------------------------------------------------------- */
627
628 /* The following set of routines is used to iterate over the various types
629 of SSA operands. */
630
631 /* Return true if PTR is finished iterating. */
632 static inline bool
633 op_iter_done (const ssa_op_iter *ptr)
634 {
635 return ptr->done;
636 }
637
638 /* Get the next iterator use value for PTR. */
639 static inline use_operand_p
640 op_iter_next_use (ssa_op_iter *ptr)
641 {
642 use_operand_p use_p;
643 gcc_checking_assert (ptr->iter_type == ssa_op_iter_use);
644 if (ptr->uses)
645 {
646 use_p = USE_OP_PTR (ptr->uses);
647 ptr->uses = ptr->uses->next;
648 return use_p;
649 }
650 if (ptr->phi_i < ptr->num_phi)
651 {
652 return PHI_ARG_DEF_PTR (ptr->phi_stmt, (ptr->phi_i)++);
653 }
654 ptr->done = true;
655 return NULL_USE_OPERAND_P;
656 }
657
658 /* Get the next iterator def value for PTR. */
659 static inline def_operand_p
660 op_iter_next_def (ssa_op_iter *ptr)
661 {
662 def_operand_p def_p;
663 gcc_checking_assert (ptr->iter_type == ssa_op_iter_def);
664 if (ptr->defs)
665 {
666 def_p = DEF_OP_PTR (ptr->defs);
667 ptr->defs = ptr->defs->next;
668 return def_p;
669 }
670 ptr->done = true;
671 return NULL_DEF_OPERAND_P;
672 }
673
674 /* Get the next iterator tree value for PTR. */
675 static inline tree
676 op_iter_next_tree (ssa_op_iter *ptr)
677 {
678 tree val;
679 gcc_checking_assert (ptr->iter_type == ssa_op_iter_tree);
680 if (ptr->uses)
681 {
682 val = USE_OP (ptr->uses);
683 ptr->uses = ptr->uses->next;
684 return val;
685 }
686 if (ptr->defs)
687 {
688 val = DEF_OP (ptr->defs);
689 ptr->defs = ptr->defs->next;
690 return val;
691 }
692
693 ptr->done = true;
694 return NULL_TREE;
695
696 }
697
698
699 /* This function clears the iterator PTR and marks it done. This is normally
700 used to prevent compiler warnings about possibly uninitialized
701 components. */
702
703 static inline void
704 clear_and_done_ssa_iter (ssa_op_iter *ptr)
705 {
706 ptr->defs = NULL;
707 ptr->uses = NULL;
708 ptr->iter_type = ssa_op_iter_none;
709 ptr->phi_i = 0;
710 ptr->num_phi = 0;
711 ptr->phi_stmt = NULL;
712 ptr->done = true;
713 }
714
715 /* Initialize the iterator PTR to the operands in STMT requested by FLAGS. */
716 static inline void
717 op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
718 {
719 /* We do not support iterating over virtual defs or uses without
720 iterating over defs or uses at the same time. */
721 gcc_checking_assert ((!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
722 && (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
723 ptr->defs = (flags & (SSA_OP_DEF|SSA_OP_VDEF)) ? gimple_def_ops (stmt) : NULL;
724 if (!(flags & SSA_OP_VDEF)
725 && ptr->defs
726 && gimple_vdef (stmt) != NULL_TREE)
727 ptr->defs = ptr->defs->next;
728 ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
729 if (!(flags & SSA_OP_VUSE)
730 && ptr->uses
731 && gimple_vuse (stmt) != NULL_TREE)
732 ptr->uses = ptr->uses->next;
733 ptr->done = false;
734
735 ptr->phi_i = 0;
736 ptr->num_phi = 0;
737 ptr->phi_stmt = NULL;
738 }
739
740 /* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
741 the first use. */
742 static inline use_operand_p
743 op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
744 {
745 gcc_checking_assert ((flags & SSA_OP_ALL_DEFS) == 0
746 && (flags & SSA_OP_USE));
747 op_iter_init (ptr, stmt, flags);
748 ptr->iter_type = ssa_op_iter_use;
749 return op_iter_next_use (ptr);
750 }
751
752 /* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
753 the first def. */
754 static inline def_operand_p
755 op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
756 {
757 gcc_checking_assert ((flags & SSA_OP_ALL_USES) == 0
758 && (flags & SSA_OP_DEF));
759 op_iter_init (ptr, stmt, flags);
760 ptr->iter_type = ssa_op_iter_def;
761 return op_iter_next_def (ptr);
762 }
763
764 /* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
765 the first operand as a tree. */
766 static inline tree
767 op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
768 {
769 op_iter_init (ptr, stmt, flags);
770 ptr->iter_type = ssa_op_iter_tree;
771 return op_iter_next_tree (ptr);
772 }
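
/* Usage sketch (illustrative only): the FOR_EACH_SSA_* operand macros are
   thin wrappers around the initializers above, roughly

     ssa_op_iter iter;
     tree op;

     for (op = op_iter_init_tree (&iter, stmt, SSA_OP_USE);
          !op_iter_done (&iter);
          op = op_iter_next_tree (&iter))
       count++;

   where count++ stands for arbitrary per-operand work (here, counting the
   real uses of STMT, much like num_ssa_operands below).  */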
773
774
775 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
776 return NULL_TREE. */
777 static inline tree
778 single_ssa_tree_operand (gimple stmt, int flags)
779 {
780 tree var;
781 ssa_op_iter iter;
782
783 var = op_iter_init_tree (&iter, stmt, flags);
784 if (op_iter_done (&iter))
785 return NULL_TREE;
786 op_iter_next_tree (&iter);
787 if (op_iter_done (&iter))
788 return var;
789 return NULL_TREE;
790 }
791
792
793 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
794 return NULL_USE_OPERAND_P. */
795 static inline use_operand_p
796 single_ssa_use_operand (gimple stmt, int flags)
797 {
798 use_operand_p var;
799 ssa_op_iter iter;
800
801 var = op_iter_init_use (&iter, stmt, flags);
802 if (op_iter_done (&iter))
803 return NULL_USE_OPERAND_P;
804 op_iter_next_use (&iter);
805 if (op_iter_done (&iter))
806 return var;
807 return NULL_USE_OPERAND_P;
808 }
809
810
811
812 /* If there is a single operand in STMT matching FLAGS, return it. Otherwise
813 return NULL_DEF_OPERAND_P. */
814 static inline def_operand_p
815 single_ssa_def_operand (gimple stmt, int flags)
816 {
817 def_operand_p var;
818 ssa_op_iter iter;
819
820 var = op_iter_init_def (&iter, stmt, flags);
821 if (op_iter_done (&iter))
822 return NULL_DEF_OPERAND_P;
823 op_iter_next_def (&iter);
824 if (op_iter_done (&iter))
825 return var;
826 return NULL_DEF_OPERAND_P;
827 }
828
829
830 /* Return true if there are zero operands in STMT matching the type
831 given in FLAGS. */
832 static inline bool
833 zero_ssa_operands (gimple stmt, int flags)
834 {
835 ssa_op_iter iter;
836
837 op_iter_init_tree (&iter, stmt, flags);
838 return op_iter_done (&iter);
839 }
840
841
842 /* Return the number of operands matching FLAGS in STMT. */
843 static inline int
844 num_ssa_operands (gimple stmt, int flags)
845 {
846 ssa_op_iter iter;
847 tree t;
848 int num = 0;
849
850 FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, flags)
851 num++;
852 return num;
853 }
854
855
856 /* Delink all immediate_use information for STMT. */
857 static inline void
858 delink_stmt_imm_use (gimple stmt)
859 {
860 ssa_op_iter iter;
861 use_operand_p use_p;
862
863 if (ssa_operands_active ())
864 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
865 delink_imm_use (use_p);
866 }
867
868
869 /* If the single DEF of the PHI node STMT matches FLAGS, return it.
870 Otherwise return NULL_TREE. */
871 static inline tree
872 single_phi_def (gimple stmt, int flags)
873 {
874 tree def = PHI_RESULT (stmt);
875 if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
876 return def;
877 if ((flags & SSA_OP_VIRTUAL_DEFS) && !is_gimple_reg (def))
878 return def;
879 return NULL_TREE;
880 }
881
882 /* Initialize the iterator PTR for uses matching FLAGS in PHI. FLAGS should
883 include at least one of SSA_OP_USE or SSA_OP_VIRTUAL_USES. */
884 static inline use_operand_p
885 op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
886 {
887 tree phi_def = gimple_phi_result (phi);
888 int comp;
889
890 clear_and_done_ssa_iter (ptr);
891 ptr->done = false;
892
893 gcc_checking_assert ((flags & (SSA_OP_USE | SSA_OP_VIRTUAL_USES)) != 0);
894
895 comp = (is_gimple_reg (phi_def) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
896
897 /* If the PHI node doesn't have the operand type we care about, we're done. */
898 if ((flags & comp) == 0)
899 {
900 ptr->done = true;
901 return NULL_USE_OPERAND_P;
902 }
903
904 ptr->phi_stmt = phi;
905 ptr->num_phi = gimple_phi_num_args (phi);
906 ptr->iter_type = ssa_op_iter_use;
907 return op_iter_next_use (ptr);
908 }
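
/* Usage sketch (illustrative only): walking the real argument uses of a
   PHI node with the initializer above; the FOR_EACH_PHI_ARG macro wraps
   this same pattern.

     ssa_op_iter iter;
     use_operand_p use_p;

     for (use_p = op_iter_init_phiuse (&iter, phi, SSA_OP_USE);
          !op_iter_done (&iter);
          use_p = op_iter_next_use (&iter))
       process_use (use_p);

   process_use is a hypothetical per-use callback.  */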
909
910
911 /* Start an iterator for a PHI definition. */
912
913 static inline def_operand_p
914 op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
915 {
916 tree phi_def = PHI_RESULT (phi);
917 int comp;
918
919 clear_and_done_ssa_iter (ptr);
920 ptr->done = false;
921
922 gcc_checking_assert ((flags & (SSA_OP_DEF | SSA_OP_VIRTUAL_DEFS)) != 0);
923
924 comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);
925
926 /* If the PHI node doesn't have the operand type we care about,
927 we're done. */
928 if ((flags & comp) == 0)
929 {
930 ptr->done = true;
931 return NULL_DEF_OPERAND_P;
932 }
933
934 ptr->iter_type = ssa_op_iter_def;
935 /* The first call to op_iter_next_def will terminate the iterator since
936 all the fields are NULL. Simply return the result here as the first and
937 therefore only result. */
938 return PHI_RESULT_PTR (phi);
939 }
940
941 /* Return true if IMM has reached the end of the immediate use stmt list. */
942
943 static inline bool
944 end_imm_use_stmt_p (const imm_use_iterator *imm)
945 {
946 return (imm->imm_use == imm->end_p);
947 }
948
949 /* Finish the traversal of an immediate use stmt list IMM by removing the
950 placeholder node from the list. */
951
952 static inline void
953 end_imm_use_stmt_traverse (imm_use_iterator *imm)
954 {
955 delink_imm_use (&(imm->iter_node));
956 }
957
958 /* Immediate use traversal of uses within a stmt requires that all the
959 uses on a stmt be listed sequentially. This routine is used to build up
960 this sequential list by adding USE_P to the end of the list currently
961 delimited by HEAD and LAST_P. The new LAST_P value is
962 returned. */
963
964 static inline use_operand_p
965 move_use_after_head (use_operand_p use_p, use_operand_p head,
966 use_operand_p last_p)
967 {
968 #ifdef ENABLE_CHECKING
969 gcc_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
970 #endif
971 /* Skip head when we find it. */
972 if (use_p != head)
973 {
974 /* If use_p is already linked in after last_p, continue. */
975 if (last_p->next == use_p)
976 last_p = use_p;
977 else
978 {
979 /* Delink from current location, and link in at last_p. */
980 delink_imm_use (use_p);
981 link_imm_use_to_list (use_p, last_p);
982 last_p = use_p;
983 }
984 }
985 return last_p;
986 }
987
988
989 /* This routine will relink all uses with the same stmt as HEAD into the list
990 immediately following HEAD for iterator IMM. */
991
992 static inline void
993 link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
994 {
995 use_operand_p use_p;
996 use_operand_p last_p = head;
997 gimple head_stmt = USE_STMT (head);
998 tree use = USE_FROM_PTR (head);
999 ssa_op_iter op_iter;
1000 int flag;
1001
1002 /* Only look at virtual or real uses, depending on the type of HEAD. */
1003 flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
1004
1005 if (gimple_code (head_stmt) == GIMPLE_PHI)
1006 {
1007 FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
1008 if (USE_FROM_PTR (use_p) == use)
1009 last_p = move_use_after_head (use_p, head, last_p);
1010 }
1011 else
1012 {
1013 if (flag == SSA_OP_USE)
1014 {
1015 FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
1016 if (USE_FROM_PTR (use_p) == use)
1017 last_p = move_use_after_head (use_p, head, last_p);
1018 }
1019 else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
1020 {
1021 if (USE_FROM_PTR (use_p) == use)
1022 last_p = move_use_after_head (use_p, head, last_p);
1023 }
1024 }
1025 /* Link iter node in after last_p. */
1026 if (imm->iter_node.prev != NULL)
1027 delink_imm_use (&imm->iter_node);
1028 link_imm_use_to_list (&(imm->iter_node), last_p);
1029 }
1030
1031 /* Initialize IMM to traverse over uses of VAR. Return the first statement. */
1032 static inline gimple
1033 first_imm_use_stmt (imm_use_iterator *imm, tree var)
1034 {
1035 imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
1036 imm->imm_use = imm->end_p->next;
1037 imm->next_imm_name = NULL_USE_OPERAND_P;
1038
1039 /* iter_node is used as a marker within the immediate use list to indicate
1040 where the current stmt's uses end. Initialize it with a NULL
1041 stmt and use, which identifies it as a marker node. */
1042 imm->iter_node.prev = NULL_USE_OPERAND_P;
1043 imm->iter_node.next = NULL_USE_OPERAND_P;
1044 imm->iter_node.loc.stmt = NULL;
1045 imm->iter_node.use = NULL;
1046
1047 if (end_imm_use_stmt_p (imm))
1048 return NULL;
1049
1050 link_use_stmts_after (imm->imm_use, imm);
1051
1052 return USE_STMT (imm->imm_use);
1053 }
1054
1055 /* Bump IMM to the next stmt which has a use of var. */
1056
1057 static inline gimple
1058 next_imm_use_stmt (imm_use_iterator *imm)
1059 {
1060 imm->imm_use = imm->iter_node.next;
1061 if (end_imm_use_stmt_p (imm))
1062 {
1063 if (imm->iter_node.prev != NULL)
1064 delink_imm_use (&imm->iter_node);
1065 return NULL;
1066 }
1067
1068 link_use_stmts_after (imm->imm_use, imm);
1069 return USE_STMT (imm->imm_use);
1070 }
1071
1072 /* This routine will return the first use on the stmt that IMM currently
1073 refers to. */
1074
1075 static inline use_operand_p
1076 first_imm_use_on_stmt (imm_use_iterator *imm)
1077 {
1078 imm->next_imm_name = imm->imm_use->next;
1079 return imm->imm_use;
1080 }
1081
1082 /* Return TRUE if the last use on the stmt that IMM refers to has been visited. */
1083
1084 static inline bool
1085 end_imm_use_on_stmt_p (const imm_use_iterator *imm)
1086 {
1087 return (imm->imm_use == &(imm->iter_node));
1088 }
1089
1090 /* Bump to the next use on the stmt IMM refers to, return NULL if done. */
1091
1092 static inline use_operand_p
1093 next_imm_use_on_stmt (imm_use_iterator *imm)
1094 {
1095 imm->imm_use = imm->next_imm_name;
1096 if (end_imm_use_on_stmt_p (imm))
1097 return NULL_USE_OPERAND_P;
1098 else
1099 {
1100 imm->next_imm_name = imm->imm_use->next;
1101 return imm->imm_use;
1102 }
1103 }
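
/* Usage sketch (illustrative only): visiting every statement that uses
   NAME, and every use within each such statement, with the iterator
   defined above.  This is roughly what the FOR_EACH_IMM_USE_STMT and
   FOR_EACH_IMM_USE_ON_STMT macros expand to.

     imm_use_iterator iter;
     gimple stmt;
     use_operand_p use_p;

     for (stmt = first_imm_use_stmt (&iter, name);
          stmt != NULL;
          stmt = next_imm_use_stmt (&iter))
       for (use_p = first_imm_use_on_stmt (&iter);
            use_p != NULL_USE_OPERAND_P;
            use_p = next_imm_use_on_stmt (&iter))
         SET_USE (use_p, newval);

   Leaving the outer loop early must go through BREAK_FROM_IMM_USE_STMT
   (or end_imm_use_stmt_traverse) so the iterator's marker node is
   delinked from the use list.  */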
1104
1105 /* Return true if VAR cannot be modified by the program. */
1106
1107 static inline bool
1108 unmodifiable_var_p (const_tree var)
1109 {
1110 if (TREE_CODE (var) == SSA_NAME)
1111 var = SSA_NAME_VAR (var);
1112
1113 return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
1114 }
1115
1116 /* Return true if REF, a handled component reference, has an ARRAY_REF
1117 somewhere in it. */
1118
1119 static inline bool
1120 ref_contains_array_ref (const_tree ref)
1121 {
1122 gcc_checking_assert (handled_component_p (ref));
1123
1124 do {
1125 if (TREE_CODE (ref) == ARRAY_REF)
1126 return true;
1127 ref = TREE_OPERAND (ref, 0);
1128 } while (handled_component_p (ref));
1129
1130 return false;
1131 }
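
/* For example (illustrative), for a reference like a.b[i].c the walk above
   goes COMPONENT_REF (.c) -> ARRAY_REF ([i]) and returns true, while for
   a.b.c it reaches the base DECL without meeting an ARRAY_REF and returns
   false.  */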
1132
1133 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it. */
1134
1135 static inline bool
1136 contains_view_convert_expr_p (const_tree ref)
1137 {
1138 while (handled_component_p (ref))
1139 {
1140 if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1141 return true;
1142 ref = TREE_OPERAND (ref, 0);
1143 }
1144
1145 return false;
1146 }
1147
1148 /* Return true if the two ranges [POS1, POS1 + SIZE1) and [POS2, POS2 + SIZE2)
1149 overlap. SIZE1 and/or SIZE2 can be (unsigned)-1, in which case the
1150 corresponding range is open-ended. Otherwise return false. */
1151
1152 static inline bool
1153 ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
1154 unsigned HOST_WIDE_INT size1,
1155 unsigned HOST_WIDE_INT pos2,
1156 unsigned HOST_WIDE_INT size2)
1157 {
1158 if (pos1 >= pos2
1159 && (size2 == (unsigned HOST_WIDE_INT)-1
1160 || pos1 < (pos2 + size2)))
1161 return true;
1162 if (pos2 >= pos1
1163 && (size1 == (unsigned HOST_WIDE_INT)-1
1164 || pos2 < (pos1 + size1)))
1165 return true;
1166
1167 return false;
1168 }
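
/* For example (illustrative), ranges_overlap_p (0, 4, 2, 8) is true
   because offsets [2, 4) are covered by both ranges, whereas
   ranges_overlap_p (0, 4, 4, 8) is false because the first range ends
   exactly where the second one starts.  */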
1169
1170 /* Accessor to tree-ssa-operands.c caches. */
1171 static inline struct ssa_operands *
1172 gimple_ssa_operands (const struct function *fun)
1173 {
1174 return &fun->gimple_df->ssa_operands;
1175 }
1176
1177 /* Given an edge_var_map V, return the PHI arg definition. */
1178
1179 static inline tree
1180 redirect_edge_var_map_def (edge_var_map *v)
1181 {
1182 return v->def;
1183 }
1184
1185 /* Given an edge_var_map V, return the PHI result. */
1186
1187 static inline tree
1188 redirect_edge_var_map_result (edge_var_map *v)
1189 {
1190 return v->result;
1191 }
1192
1193 /* Given an edge_var_map V, return the PHI arg location. */
1194
1195 static inline source_location
1196 redirect_edge_var_map_location (edge_var_map *v)
1197 {
1198 return v->locus;
1199 }
1200
1201
1202 /* Return an SSA_NAME node for variable VAR defined in statement STMT
1203 in function cfun. */
1204
1205 static inline tree
1206 make_ssa_name (tree var, gimple stmt)
1207 {
1208 return make_ssa_name_fn (cfun, var, stmt);
1209 }
1210
1211 #endif /* _TREE_FLOW_INLINE_H */