/* Gimple walk support.

   Copyright (C) 2007-2015 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stmt.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "demangle.h"

/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
                     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;

          return wi->removed_stmt ? NULL : gsi_stmt (gsi);
        }

      if (!wi->removed_stmt)
        gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}
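
/* An illustrative, compile-disabled sketch of using walk_gimple_seq with a
   statement callback: count the GIMPLE_CALL statements in a sequence.  The
   names count_calls_stmt and count_calls_in_seq are hypothetical helpers,
   not part of this file; the walk_stmt_fn signature and the wi->info field
   are those declared in gimple-walk.h.  */
#if 0
static tree
count_calls_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                  struct walk_stmt_info *wi)
{
  if (is_gimple_call (gsi_stmt (*gsi)))
    ++*(unsigned *) wi->info;

  /* The operands are not interesting here, so skip the operand walk.
     Returning NULL_TREE keeps the walk going.  */
  *handled_ops_p = true;
  return NULL_TREE;
}

static unsigned
count_calls_in_seq (gimple_seq seq)
{
  struct walk_stmt_info wi;
  unsigned count = 0;

  memset (&wi, 0, sizeof (wi));
  wi.info = &count;
  walk_gimple_seq (seq, count_calls_stmt, NULL, &wi);
  return count;
}
#endif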


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

       walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  hash_set<tree> *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS is of a non-renamable type or is a register variable,
             we may use a COMPONENT_REF on the LHS.  */
          tree rhs1 = gimple_assign_rhs1 (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only
              = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only
                = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      {
        gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
        ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;

        ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_CRITICAL:
      {
        gomp_critical *omp_stmt = as_a <gomp_critical *> (stmt);
        ret = walk_tree (gimple_omp_critical_name_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
        }
      break;

    case GIMPLE_OMP_PARALLEL:
      {
        gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
        ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TARGET:
      {
        gomp_target *omp_stmt = as_a <gomp_target *> (stmt);
        ret = walk_tree (gimple_omp_target_clauses_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      {
        gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
        ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
        ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      {
        gomp_atomic_store *omp_stmt = as_a <gomp_atomic_store *> (stmt);
        ret = walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt),
                         callback_op, wi, pset);
        if (ret)
          return ret;
      }
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (
                         as_a <gtransaction *> (stmt)),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}
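
/* An illustrative, compile-disabled sketch of an operand callback used with
   walk_gimple_op: test whether STMT mentions a particular decl.  The names
   find_decl_op and stmt_mentions_decl_p are hypothetical helpers, not part
   of this file; the walk_tree_fn signature and the wi->info field are those
   declared in the tree and gimple-walk headers.  */
#if 0
static tree
find_decl_op (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;

  /* Returning non-NULL stops the operand walk immediately.  */
  if (*tp == (tree) wi->info)
    return *tp;
  return NULL_TREE;
}

static bool
stmt_mentions_decl_p (gimple stmt, tree decl)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = decl;
  return walk_gimple_op (stmt, find_decl_op, &wi) != NULL_TREE;
}
#endif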


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
        input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT's operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
        return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
                                   as_a <gcatch *> (stmt)),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
                                   callback_stmt, callback_op, wi);
        if (ret)
          return wi->callback_result;
        ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
                                   callback_stmt, callback_op, wi);
        if (ret)
          return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
                                 wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
                                   as_a <gtransaction *> (stmt)),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
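
/* An illustrative, compile-disabled sketch of stopping a walk early: a
   statement callback that returns a non-NULL tree with *HANDLED_OPS_P set
   at the first GIMPLE_ASM it sees, so walk_gimple_seq stores the value in
   wi->callback_result and returns the statement that produced it.  The
   names find_asm_stmt and first_asm_in_seq are hypothetical helpers, not
   part of this file.  */
#if 0
static tree
find_asm_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
               struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
{
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      /* Claim the operands and return a non-NULL value to stop the walk.  */
      *handled_ops_p = true;
      return error_mark_node;
    }
  return NULL_TREE;
}

static gimple
first_asm_in_seq (gimple_seq seq)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  return walk_gimple_seq (seq, find_asm_stmt, NULL, &wi);
}
#endif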

/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}


/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands
   passing the STMT, the base of the operand, the operand itself containing
   the base and DATA to it.  The base will be either a decl, an indirect
   reference (including TARGET_MEM_REF) or the argument of an address
   expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
                               walk_stmt_load_store_addr_fn visit_load,
                               walk_stmt_load_store_addr_fn visit_store,
                               walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs, arg;
      if (visit_store)
        {
          arg = gimple_assign_lhs (stmt);
          lhs = get_base_loadstore (arg);
          if (lhs)
            ret |= visit_store (stmt, lhs, arg, data);
        }
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      while (handled_component_p (rhs))
        rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
        {
          if (TREE_CODE (rhs) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
          else if (TREE_CODE (rhs) == TARGET_MEM_REF
                   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), arg,
                               data);
          else if (TREE_CODE (rhs) == OBJ_TYPE_REF
                   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
                                                   0), arg, data);
          else if (TREE_CODE (rhs) == CONSTRUCTOR)
            {
              unsigned int ix;
              tree val;

              FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
                if (TREE_CODE (val) == ADDR_EXPR)
                  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
                else if (TREE_CODE (val) == OBJ_TYPE_REF
                         && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
                  ret |= visit_addr (stmt,
                                     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
                                                   0), arg, data);
            }
          lhs = gimple_assign_lhs (stmt);
          if (TREE_CODE (lhs) == TARGET_MEM_REF
              && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), lhs, data);
        }
      if (visit_load)
        {
          rhs = get_base_loadstore (rhs);
          if (rhs)
            ret |= visit_load (stmt, rhs, arg, data);
        }
    }
  else if (visit_addr
           && (is_gimple_assign (stmt)
               || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
        {
          tree op = gimple_op (stmt, i);
          if (op == NULL_TREE)
            ;
          else if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
          /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a comparison
             tree with two operands.  */
          else if (i == 1 && COMPARISON_CLASS_P (op))
            {
              if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
                ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
                                                       0), op, data);
              if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
                ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
                                                       0), op, data);
            }
        }
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      if (visit_store)
        {
          tree arg = gimple_call_lhs (call_stmt);
          if (arg)
            {
              tree lhs = get_base_loadstore (arg);
              if (lhs)
                ret |= visit_store (stmt, lhs, arg, data);
            }
        }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
          {
            tree arg = gimple_call_arg (call_stmt, i);
            if (visit_addr
                && TREE_CODE (arg) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
            else if (visit_load)
              {
                tree rhs = get_base_loadstore (arg);
                if (rhs)
                  ret |= visit_load (stmt, rhs, arg, data);
              }
          }
      if (visit_addr
          && gimple_call_chain (call_stmt)
          && TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
        ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (call_stmt), 0),
                           gimple_call_chain (call_stmt), data);
      if (visit_addr
          && gimple_call_return_slot_opt_p (call_stmt)
          && gimple_call_lhs (call_stmt) != NULL_TREE
          && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
        ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
                           gimple_call_lhs (call_stmt), data);
    }
  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = get_base_loadstore (TREE_VALUE (link));
            if (op && visit_store)
              ret |= visit_store (stmt, op, TREE_VALUE (link), data);
            if (visit_addr)
              {
                constraint = TREE_STRING_POINTER
                    (TREE_VALUE (TREE_PURPOSE (link)));
                oconstraints[i] = constraint;
                parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
                                         &allows_reg, &is_inout);
                if (op && !allows_reg && allows_mem)
                  ret |= visit_addr (stmt, op, TREE_VALUE (link), data);
              }
          }
      if (visit_load || visit_addr)
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (visit_addr
                && TREE_CODE (op) == ADDR_EXPR)
              ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
            else if (visit_load || visit_addr)
              {
                op = get_base_loadstore (op);
                if (op)
                  {
                    if (visit_load)
                      ret |= visit_load (stmt, op, TREE_VALUE (link), data);
                    if (visit_addr)
                      {
                        constraint = TREE_STRING_POINTER
                            (TREE_VALUE (TREE_PURPOSE (link)));
                        parse_input_constraint (&constraint, 0, 0, noutputs,
                                                0, oconstraints,
                                                &allows_mem, &allows_reg);
                        if (!allows_reg && allows_mem)
                          ret |= visit_addr (stmt, op, TREE_VALUE (link),
                                             data);
                      }
                  }
              }
          }
    }
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree op = gimple_return_retval (return_stmt);
      if (op)
        {
          if (visit_addr
              && TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
          else if (visit_load)
            {
              tree base = get_base_loadstore (op);
              if (base)
                ret |= visit_load (stmt, base, op, data);
            }
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
        {
          tree op = gimple_phi_arg_def (stmt, i);
          if (TREE_CODE (op) == ADDR_EXPR)
            ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
        }
    }
  else if (visit_addr
           && gimple_code (stmt) == GIMPLE_GOTO)
    {
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
        ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
    }

  return ret;
}

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
                          walk_stmt_load_store_addr_fn visit_load,
                          walk_stmt_load_store_addr_fn visit_store)
{
  return walk_stmt_load_store_addr_ops (stmt, data,
                                        visit_load, visit_store, NULL);
}
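
/* An illustrative, compile-disabled sketch of load/store callbacks used with
   walk_stmt_load_store_ops: count the memory references of a statement.  The
   names count_mem_ref and count_stmt_mem_refs are hypothetical helpers, not
   part of this file; the walk_stmt_load_store_addr_fn signature is the one
   declared in gimple-walk.h.  */
#if 0
static bool
count_mem_ref (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
               tree op ATTRIBUTE_UNUSED, void *data)
{
  ++*(unsigned *) data;
  /* Returning false keeps the OR of the callback results false.  */
  return false;
}

static unsigned
count_stmt_mem_refs (gimple stmt)
{
  unsigned count = 0;
  walk_stmt_load_store_ops (stmt, &count, count_mem_ref, count_mem_ref);
  return count;
}
#endif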