/* Gimple IR support functions.

   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"
#include "bitmap.h"


/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
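
/* A minimal sketch of the size computation above, assuming a
   GIMPLE_ASSIGN (stored as struct gimple_statement_with_memory_ops)
   with NUM_OPS == 3; the byte count is illustrative and host-dependent:

     size  = sizeof (struct gimple_statement_with_memory_ops);
     size += sizeof (tree) * (3 - 1);      (two extra operand slots)

   Only NUM_OPS - 1 extra slots are needed because each tuple already
   ends in a one-element tree array (see gimple_ops_offset_ above).  */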

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, vec<tree> args)
{
  unsigned i;
  unsigned nargs = args.length ();
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
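
/* A usage sketch for the varargs builder above, assuming FNDECL,
   ARG0, ARG1 and LHS are suitable pre-existing trees and SEQ is a
   gimple_seq under construction (all names hypothetical):

     gimple call = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (call, lhs);
     gimple_seq_add_stmt (&seq, call);
*/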


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
{
  unsigned i, nargs;
  gimple call;

  nargs = args.length ();
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Return the position within call GS of its INDEX'th non-bound
   argument, skipping arguments that satisfy POINTER_BOUNDS_P.  */

unsigned
gimple_call_get_nobnd_arg_index (const_gimple gs, unsigned index)
{
  unsigned num_args = gimple_call_num_args (gs);
  for (unsigned n = 0; n < num_args; n++)
    {
      if (POINTER_BOUNDS_P (gimple_call_arg (gs, n)))
	continue;
      else if (index)
	index--;
      else
	return n;
    }

  gcc_unreachable ();
}
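
/* A worked example of the mapping above: for a call whose arguments
   are (a, bnd0, b, bnd1), where bnd0 and bnd1 satisfy POINTER_BOUNDS_P,
   gimple_call_get_nobnd_arg_index (gs, 1) skips the bound arguments
   and returns 2, the position of `b'.  */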


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
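
/* For example, given EXPR = `a_1 + b_2' (PLUS_EXPR, class
   GIMPLE_BINARY_RHS), the function above yields *SUBCODE_P = PLUS_EXPR,
   *OP1_P = a_1, *OP2_P = b_2 and *OP3_P = NULL_TREE.  */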


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which may be a unary, binary or ternary
   expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
				       PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
			      tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
			      tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
				       PASS_MEM_STAT);
}
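
/* Usage sketches for the builders above, assuming LHS, A and B are
   suitable pre-existing trees (hypothetical names):

     gimple add = gimple_build_assign_with_ops (PLUS_EXPR, lhs, a, b);
     gimple neg = gimple_build_assign_with_ops (NEGATE_EXPR, lhs, a,
						NULL_TREE);
*/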


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}


/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
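
/* A sketch of building `if (a < b) goto t; else goto f;' with the
   builder above, assuming A, B and the two label trees already exist
   (hypothetical names):

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);
*/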


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
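
/* Examples of the canonicalizations above: for COND = `!x_1' the
   result is (EQ_EXPR, x_1, 0); for a bare SSA name COND = `x_1' it is
   (NE_EXPR, x_1, 0), with the zero built by build_zero_cst in the type
   of the operand.  */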


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
		    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
		      vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
		      vec<tree, va_gc> *labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  vec_safe_length (inputs),
			  vec_safe_length (outputs),
			  vec_safe_length (clobbers),
			  vec_safe_length (labels));

  for (i = 0; i < vec_safe_length (inputs); i++)
    gimple_asm_set_input_op (p, i, (*inputs)[i]);

  for (i = 0; i < vec_safe_length (outputs); i++)
    gimple_asm_set_output_op (p, i, (*outputs)[i]);

  for (i = 0; i < vec_safe_length (clobbers); i++)
    gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);

  for (i = 0; i < vec_safe_length (labels); i++)
    gimple_asm_set_label_op (p, i, (*labels)[i]);

  return p;
}

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + 1 + nlabels);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch (tree index, tree default_label, vec<tree> args)
{
  unsigned i, nlabels = args.length ();

  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + 1, args[i]);

  return p;
}
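
/* A usage sketch for the switch builders above, assuming INDEX is the
   controlling expression and DEFLT, CASE1, CASE2 are CASE_LABEL_EXPRs
   built elsewhere (hypothetical names):

     vec<tree> labels;
     labels.create (2);
     labels.quick_push (case1);
     labels.quick_push (case2);
     gimple swtch = gimple_build_switch (index, deflt, labels);
     labels.release ();
*/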

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_TASKGROUP statement.

   BODY is the sequence of statements to be executed by the taskgroup
   construct.  */

gimple
gimple_build_omp_taskgroup (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_TARGET statement.

   BODY is the sequence of statements that will be executed.
   KIND is the kind of the target construct.
   CLAUSES are any of the OMP target construct's clauses.  */

gimple
gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_target_set_clauses (p, clauses);
  gimple_omp_target_set_kind (p, kind);

  return p;
}


/* Build a GIMPLE_OMP_TEAMS statement.

   BODY is the sequence of statements that will be executed.
   CLAUSES are any of the OMP teams construct's clauses.  */

gimple
gimple_build_omp_teams (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_teams_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->gsbase.subcode)
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;
  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
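
/* A minimal sketch of sequence building with the helper above;
   starting from a NULL sequence is fine because the underlying
   iterator insertion allocates one lazily:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());
     gimple_seq_add_stmt (&seq, stmt);     (STMT is hypothetical)
*/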


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
		     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}
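
/* A sketch of a statement walk using the entry points above;
   COUNT_CALLS is a hypothetical callback that tallies GIMPLE_CALLs
   and leaves operand walking to the caller:

     static tree
     count_calls (gimple_stmt_iterator *gsi, bool *handled_ops,
		  struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 ++*(int *) wi->info;
       *handled_ops = false;
       return NULL_TREE;
     }

   and at the call site:

     struct walk_stmt_info wi;
     int n = 0;
     memset (&wi, 0, sizeof (wi));
     wi.info = &n;
     walk_gimple_seq (seq, count_calls, NULL, &wi);
*/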


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really an LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
	 is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	}
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TARGET:
      ret = walk_tree (gimple_omp_target_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}
1961
1962 /* Detect flags from a GIMPLE_CALL. This is just like
1963 call_expr_flags, but for gimple tuples. */
1964
1965 int
1966 gimple_call_flags (const_gimple stmt)
1967 {
1968 int flags;
1969 tree decl = gimple_call_fndecl (stmt);
1970
1971 if (decl)
1972 flags = flags_from_decl_or_type (decl);
1973 else if (gimple_call_internal_p (stmt))
1974 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1975 else
1976 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1977
1978 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1979 flags |= ECF_NOTHROW;
1980
1981 return flags;
1982 }
1983
1984 /* Return the "fn spec" string for call STMT. */
1985
1986 static tree
1987 gimple_call_fnspec (const_gimple stmt)
1988 {
1989 tree type, attr;
1990
1991 type = gimple_call_fntype (stmt);
1992 if (!type)
1993 return NULL_TREE;
1994
1995 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1996 if (!attr)
1997 return NULL_TREE;
1998
1999 return TREE_VALUE (TREE_VALUE (attr));
2000 }
2001
2002 /* Detects argument flags for argument number ARG on call STMT. */
2003
2004 int
2005 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
2006 {
2007 tree attr = gimple_call_fnspec (stmt);
2008
2009 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
2010 return 0;
2011
2012 switch (TREE_STRING_POINTER (attr)[1 + arg])
2013 {
2014 case 'x':
2015 case 'X':
2016 return EAF_UNUSED;
2017
2018 case 'R':
2019 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
2020
2021 case 'r':
2022 return EAF_NOCLOBBER | EAF_NOESCAPE;
2023
2024 case 'W':
2025 return EAF_DIRECT | EAF_NOESCAPE;
2026
2027 case 'w':
2028 return EAF_NOESCAPE;
2029
2030 case '.':
2031 default:
2032 return 0;
2033 }
2034 }
2035
2036 /* Detects return flags for the call STMT. */
2037
2038 int
2039 gimple_call_return_flags (const_gimple stmt)
2040 {
2041 tree attr;
2042
2043 if (gimple_call_flags (stmt) & ECF_MALLOC)
2044 return ERF_NOALIAS;
2045
2046 attr = gimple_call_fnspec (stmt);
2047 if (!attr || TREE_STRING_LENGTH (attr) < 1)
2048 return 0;
2049
2050 switch (TREE_STRING_POINTER (attr)[0])
2051 {
2052 case '1':
2053 case '2':
2054 case '3':
2055 case '4':
2056 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
2057
2058 case 'm':
2059 return ERF_NOALIAS;
2060
2061 case '.':
2062 default:
2063 return 0;
2064 }
2065 }
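
/* Illustrative sketch (not part of this file): how a pass might query
   the fnspec-derived flags above.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_arg_may_escape_p (const_gimple call, unsigned argno)
{
  /* EAF_NOESCAPE means the callee is known not to let the pointer
     escape; when the flag is absent we must assume it may escape.  */
  return (gimple_call_arg_flags (call, argno) & EAF_NOESCAPE) == 0;
}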
2066
2067
2068 /* Return true if GS is a copy assignment. */
2069
2070 bool
2071 gimple_assign_copy_p (gimple gs)
2072 {
2073 return (gimple_assign_single_p (gs)
2074 && is_gimple_val (gimple_op (gs, 1)));
2075 }
2076
2077
2078 /* Return true if GS is an SSA_NAME copy assignment. */
2079
2080 bool
2081 gimple_assign_ssa_name_copy_p (gimple gs)
2082 {
2083 return (gimple_assign_single_p (gs)
2084 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2085 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2086 }
2087
2088
2089 /* Return true if GS is an assignment with a unary RHS, but the
2090 operator has no effect on the assigned value. The logic is adapted
2091 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2092 instances in which STRIP_NOPS was previously applied to the RHS of
2093 an assignment.
2094
2095 NOTE: In the use cases that led to the creation of this function
2096 and of gimple_assign_single_p, it is typical to test for either
2097 condition and to proceed in the same manner. In each case, the
2098 assigned value is represented by the single RHS operand of the
2099 assignment. I suspect there may be cases where gimple_assign_copy_p,
2100 gimple_assign_single_p, or equivalent logic is used where a similar
2101 treatment of unary NOPs is appropriate. */
2102
2103 bool
2104 gimple_assign_unary_nop_p (gimple gs)
2105 {
2106 return (is_gimple_assign (gs)
2107 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2108 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2109 && gimple_assign_rhs1 (gs) != error_mark_node
2110 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2111 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2112 }
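
/* Illustrative sketch (not part of this file): a hypothetical helper
   that looks through a single value-preserving conversion, e.g. an
   int -> unsigned int cast where both types share a mode.  */

static tree ATTRIBUTE_UNUSED
example_skip_unary_nop (gimple def_stmt)
{
  if (gimple_assign_unary_nop_p (def_stmt))
    /* The conversion does not change the value; use its operand.  */
    return gimple_assign_rhs1 (def_stmt);
  return NULL_TREE;
}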
2113
2114 /* Set BB to be the basic block holding statement STMT. */
2115
2116 void
2117 gimple_set_bb (gimple stmt, basic_block bb)
2118 {
2119 stmt->gsbase.bb = bb;
2120
2121 /* If the statement is a label, add the label to block-to-labels map
2122 so that we can speed up edge creation for GIMPLE_GOTOs. */
2123 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2124 {
2125 tree t;
2126 int uid;
2127
2128 t = gimple_label_label (stmt);
2129 uid = LABEL_DECL_UID (t);
2130 if (uid == -1)
2131 {
2132 unsigned old_len = vec_safe_length (label_to_block_map);
2133 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2134 if (old_len <= (unsigned) uid)
2135 {
2136 unsigned new_len = 3 * uid / 2 + 1;
2137
2138 vec_safe_grow_cleared (label_to_block_map, new_len);
2139 }
2140 }
2141
2142 (*label_to_block_map)[uid] = bb;
2143 }
2144 }
2145
2146
2147 /* Modify the RHS of the assignment pointed-to by GSI using the
2148 operands in the expression tree EXPR.
2149
2150 NOTE: The statement pointed-to by GSI may be reallocated if it
2151 did not have enough operand slots.
2152
2153 This function is useful to convert an existing tree expression into
2154 the flat representation used for the RHS of a GIMPLE assignment.
2155 It will reallocate memory as needed to expand or shrink the number
2156 of operand slots needed to represent EXPR.
2157
2158 NOTE: If you find yourself building a tree and then calling this
2159 function, you are most certainly doing it the slow way. It is much
2160 better to build a new assignment or to use the function
2161 gimple_assign_set_rhs_with_ops, which does not require an
2162 expression tree to be built. */
2163
2164 void
2165 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2166 {
2167 enum tree_code subcode;
2168 tree op1, op2, op3;
2169
2170 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2171 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2172 }
2173
2174
2175 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2176 operands OP1, OP2 and OP3.
2177
2178 NOTE: The statement pointed-to by GSI may be reallocated if it
2179 did not have enough operand slots. */
2180
2181 void
2182 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2183 tree op1, tree op2, tree op3)
2184 {
2185 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2186 gimple stmt = gsi_stmt (*gsi);
2187
2188 /* If the new CODE needs more operands, allocate a new statement. */
2189 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2190 {
2191 tree lhs = gimple_assign_lhs (stmt);
2192 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2193 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2194 gimple_init_singleton (new_stmt);
2195 gsi_replace (gsi, new_stmt, true);
2196 stmt = new_stmt;
2197
2198 /* The LHS needs to be reset as this also changes the SSA name
2199 on the LHS. */
2200 gimple_assign_set_lhs (stmt, lhs);
2201 }
2202
2203 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2204 gimple_set_subcode (stmt, code);
2205 gimple_assign_set_rhs1 (stmt, op1);
2206 if (new_rhs_ops > 1)
2207 gimple_assign_set_rhs2 (stmt, op2);
2208 if (new_rhs_ops > 2)
2209 gimple_assign_set_rhs3 (stmt, op3);
2210 }
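
/* Illustrative sketch (not part of this file): rewriting the RHS of
   the assignment at GSI in place, e.g. turning "x = y" into
   "x = a + b".  A and B are hypothetical operands and must already be
   valid GIMPLE values.  The statement may be reallocated, so it is
   re-fetched from GSI afterwards.  */

static void ATTRIBUTE_UNUSED
example_rewrite_rhs_to_plus (gimple_stmt_iterator *gsi, tree a, tree b)
{
  /* PLUS_EXPR is a binary RHS code, so OP3 is unused.  */
  gimple_assign_set_rhs_with_ops_1 (gsi, PLUS_EXPR, a, b, NULL_TREE);
  /* Mark the statement so the SSA operand scanner revisits it; a
     pass would typically call update_stmt at this point.  */
  gimple_set_modified (gsi_stmt (*gsi), true);
}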
2211
2212
2213 /* Return the LHS of a statement that performs an assignment,
2214 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2215 for a call to a function that returns no value, or for a
2216 statement other than an assignment or a call. */
2217
2218 tree
2219 gimple_get_lhs (const_gimple stmt)
2220 {
2221 enum gimple_code code = gimple_code (stmt);
2222
2223 if (code == GIMPLE_ASSIGN)
2224 return gimple_assign_lhs (stmt);
2225 else if (code == GIMPLE_CALL)
2226 return gimple_call_lhs (stmt);
2227 else
2228 return NULL_TREE;
2229 }
2230
2231
2232 /* Set the LHS of a statement that performs an assignment,
2233 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2234
2235 void
2236 gimple_set_lhs (gimple stmt, tree lhs)
2237 {
2238 enum gimple_code code = gimple_code (stmt);
2239
2240 if (code == GIMPLE_ASSIGN)
2241 gimple_assign_set_lhs (stmt, lhs);
2242 else if (code == GIMPLE_CALL)
2243 gimple_call_set_lhs (stmt, lhs);
2244 else
2245 gcc_unreachable ();
2246 }
2247
2248
2249 /* Return a deep copy of statement STMT. All the operands from STMT
2250 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2251 and VUSE operand arrays are set to empty in the new copy. The new
2252 copy isn't part of any sequence. */
2253
2254 gimple
2255 gimple_copy (gimple stmt)
2256 {
2257 enum gimple_code code = gimple_code (stmt);
2258 unsigned num_ops = gimple_num_ops (stmt);
2259 gimple copy = gimple_alloc (code, num_ops);
2260 unsigned i;
2261
2262 /* Shallow copy all the fields from STMT. */
2263 memcpy (copy, stmt, gimple_size (code));
2264 gimple_init_singleton (copy);
2265
2266 /* If STMT has sub-statements, deep-copy them as well. */
2267 if (gimple_has_substatements (stmt))
2268 {
2269 gimple_seq new_seq;
2270 tree t;
2271
2272 switch (gimple_code (stmt))
2273 {
2274 case GIMPLE_BIND:
2275 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2276 gimple_bind_set_body (copy, new_seq);
2277 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2278 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2279 break;
2280
2281 case GIMPLE_CATCH:
2282 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2283 gimple_catch_set_handler (copy, new_seq);
2284 t = unshare_expr (gimple_catch_types (stmt));
2285 gimple_catch_set_types (copy, t);
2286 break;
2287
2288 case GIMPLE_EH_FILTER:
2289 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2290 gimple_eh_filter_set_failure (copy, new_seq);
2291 t = unshare_expr (gimple_eh_filter_types (stmt));
2292 gimple_eh_filter_set_types (copy, t);
2293 break;
2294
2295 case GIMPLE_EH_ELSE:
2296 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2297 gimple_eh_else_set_n_body (copy, new_seq);
2298 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2299 gimple_eh_else_set_e_body (copy, new_seq);
2300 break;
2301
2302 case GIMPLE_TRY:
2303 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2304 gimple_try_set_eval (copy, new_seq);
2305 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2306 gimple_try_set_cleanup (copy, new_seq);
2307 break;
2308
2309 case GIMPLE_OMP_FOR:
2310 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2311 gimple_omp_for_set_pre_body (copy, new_seq);
2312 t = unshare_expr (gimple_omp_for_clauses (stmt));
2313 gimple_omp_for_set_clauses (copy, t);
2314 copy->gimple_omp_for.iter
2315 = ggc_alloc_vec_gimple_omp_for_iter
2316 (gimple_omp_for_collapse (stmt));
2317 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2318 {
2319 gimple_omp_for_set_cond (copy, i,
2320 gimple_omp_for_cond (stmt, i));
2321 gimple_omp_for_set_index (copy, i,
2322 gimple_omp_for_index (stmt, i));
2323 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2324 gimple_omp_for_set_initial (copy, i, t);
2325 t = unshare_expr (gimple_omp_for_final (stmt, i));
2326 gimple_omp_for_set_final (copy, i, t);
2327 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2328 gimple_omp_for_set_incr (copy, i, t);
2329 }
2330 goto copy_omp_body;
2331
2332 case GIMPLE_OMP_PARALLEL:
2333 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2334 gimple_omp_parallel_set_clauses (copy, t);
2335 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2336 gimple_omp_parallel_set_child_fn (copy, t);
2337 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2338 gimple_omp_parallel_set_data_arg (copy, t);
2339 goto copy_omp_body;
2340
2341 case GIMPLE_OMP_TASK:
2342 t = unshare_expr (gimple_omp_task_clauses (stmt));
2343 gimple_omp_task_set_clauses (copy, t);
2344 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2345 gimple_omp_task_set_child_fn (copy, t);
2346 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2347 gimple_omp_task_set_data_arg (copy, t);
2348 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2349 gimple_omp_task_set_copy_fn (copy, t);
2350 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2351 gimple_omp_task_set_arg_size (copy, t);
2352 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2353 gimple_omp_task_set_arg_align (copy, t);
2354 goto copy_omp_body;
2355
2356 case GIMPLE_OMP_CRITICAL:
2357 t = unshare_expr (gimple_omp_critical_name (stmt));
2358 gimple_omp_critical_set_name (copy, t);
2359 goto copy_omp_body;
2360
2361 case GIMPLE_OMP_SECTIONS:
2362 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2363 gimple_omp_sections_set_clauses (copy, t);
2364 t = unshare_expr (gimple_omp_sections_control (stmt));
2365 gimple_omp_sections_set_control (copy, t);
2366 /* FALLTHRU */
2367
2368 case GIMPLE_OMP_SINGLE:
2369 case GIMPLE_OMP_TARGET:
2370 case GIMPLE_OMP_TEAMS:
2371 case GIMPLE_OMP_SECTION:
2372 case GIMPLE_OMP_MASTER:
2373 case GIMPLE_OMP_TASKGROUP:
2374 case GIMPLE_OMP_ORDERED:
2375 copy_omp_body:
2376 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2377 gimple_omp_set_body (copy, new_seq);
2378 break;
2379
2380 case GIMPLE_TRANSACTION:
2381 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2382 gimple_transaction_set_body (copy, new_seq);
2383 break;
2384
2385 case GIMPLE_WITH_CLEANUP_EXPR:
2386 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2387 gimple_wce_set_cleanup (copy, new_seq);
2388 break;
2389
2390 default:
2391 gcc_unreachable ();
2392 }
2393 }
2394
2395 /* Make copy of operands. */
2396 for (i = 0; i < num_ops; i++)
2397 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2398
2399 if (gimple_has_mem_ops (stmt))
2400 {
2401 gimple_set_vdef (copy, gimple_vdef (stmt));
2402 gimple_set_vuse (copy, gimple_vuse (stmt));
2403 }
2404
2405 /* Clear out SSA operand vectors on COPY. */
2406 if (gimple_has_ops (stmt))
2407 {
2408 gimple_set_use_ops (copy, NULL);
2409
2410 /* SSA operands need to be updated. */
2411 gimple_set_modified (copy, true);
2412 }
2413
2414 return copy;
2415 }
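
/* Illustrative sketch (not part of this file): duplicating the
   statement at GSI and inserting the copy right after it.  Because
   gimple_copy duplicates the operands verbatim, a pass copying a
   statement with SSA definitions must also give the copy new SSA
   names before it goes live; that step is omitted here.  */

static gimple ATTRIBUTE_UNUSED
example_duplicate_after (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));
  gsi_insert_after (gsi, copy, GSI_NEW_STMT);
  return copy;
}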
2416
2417
2418 /* Return true if statement S has side-effects. We consider a
2419 statement to have side effects if:
2420
2421 - It is a GIMPLE_CALL not marked ECF_PURE or ECF_CONST.
2422 - It is a volatile GIMPLE_ASM or any of its operands is TREE_THIS_VOLATILE. */
2423
2424 bool
2425 gimple_has_side_effects (const_gimple s)
2426 {
2427 if (is_gimple_debug (s))
2428 return false;
2429
2430 /* We do not have to scan the arguments to check for volatile
2431 operands: the gimple_has_volatile_ops flag is kept up to
2432 date and already accounts for them. */
2433 if (gimple_has_volatile_ops (s))
2434 return true;
2435
2436 if (gimple_code (s) == GIMPLE_ASM
2437 && gimple_asm_volatile_p (s))
2438 return true;
2439
2440 if (is_gimple_call (s))
2441 {
2442 int flags = gimple_call_flags (s);
2443
2444 /* An infinite loop is considered a side effect. */
2445 if (!(flags & (ECF_CONST | ECF_PURE))
2446 || (flags & ECF_LOOPING_CONST_OR_PURE))
2447 return true;
2448
2449 return false;
2450 }
2451
2452 return false;
2453 }
2454
2455 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2456 Return true if S can trap. When INCLUDE_MEM is true, check whether
2457 the memory operations could trap. When INCLUDE_STORES is true and
2458 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2459
2460 bool
2461 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2462 {
2463 tree t, div = NULL_TREE;
2464 enum tree_code op;
2465
2466 if (include_mem)
2467 {
2468 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2469
2470 for (i = start; i < gimple_num_ops (s); i++)
2471 if (tree_could_trap_p (gimple_op (s, i)))
2472 return true;
2473 }
2474
2475 switch (gimple_code (s))
2476 {
2477 case GIMPLE_ASM:
2478 return gimple_asm_volatile_p (s);
2479
2480 case GIMPLE_CALL:
2481 t = gimple_call_fndecl (s);
2482 /* Assume that calls to weak functions may trap. */
2483 if (!t || !DECL_P (t) || DECL_WEAK (t))
2484 return true;
2485 return false;
2486
2487 case GIMPLE_ASSIGN:
2488 t = gimple_expr_type (s);
2489 op = gimple_assign_rhs_code (s);
2490 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2491 div = gimple_assign_rhs2 (s);
2492 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2493 (INTEGRAL_TYPE_P (t)
2494 && TYPE_OVERFLOW_TRAPS (t)),
2495 div));
2496
2497 default:
2498 break;
2499 }
2500
2501 return false;
2502 }
2503
2504 /* Return true if statement S can trap. */
2505
2506 bool
2507 gimple_could_trap_p (gimple s)
2508 {
2509 return gimple_could_trap_p_1 (s, true, true);
2510 }
2511
2512 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2513
2514 bool
2515 gimple_assign_rhs_could_trap_p (gimple s)
2516 {
2517 gcc_assert (is_gimple_assign (s));
2518 return gimple_could_trap_p_1 (s, true, false);
2519 }
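
/* Illustrative sketch (not part of this file): the usual guard before
   moving a statement somewhere it may execute speculatively; anything
   that can trap or has side effects must stay where it is.  */

static bool ATTRIBUTE_UNUSED
example_safe_to_speculate_p (gimple stmt)
{
  return (!gimple_has_side_effects (stmt)
          && !gimple_could_trap_p (stmt));
}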
2520
2521
2522 /* Print allocation statistics for the GIMPLE statements generated. */
2523
2524 void
2525 dump_gimple_statistics (void)
2526 {
2527 int i, total_tuples = 0, total_bytes = 0;
2528
2529 if (! GATHER_STATISTICS)
2530 {
2531 fprintf (stderr, "No gimple statistics\n");
2532 return;
2533 }
2534
2535 fprintf (stderr, "\nGIMPLE statements\n");
2536 fprintf (stderr, "Kind Stmts Bytes\n");
2537 fprintf (stderr, "---------------------------------------\n");
2538 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2539 {
2540 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2541 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2542 total_tuples += gimple_alloc_counts[i];
2543 total_bytes += gimple_alloc_sizes[i];
2544 }
2545 fprintf (stderr, "---------------------------------------\n");
2546 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2547 fprintf (stderr, "---------------------------------------\n");
2548 }
2549
2550
2551 /* Return the number of operands needed on the RHS of a GIMPLE
2552 assignment for an expression with tree code CODE. */
2553
2554 unsigned
2555 get_gimple_rhs_num_ops (enum tree_code code)
2556 {
2557 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2558
2559 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2560 return 1;
2561 else if (rhs_class == GIMPLE_BINARY_RHS)
2562 return 2;
2563 else if (rhs_class == GIMPLE_TERNARY_RHS)
2564 return 3;
2565 else
2566 gcc_unreachable ();
2567 }
2568
2569 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2570 (unsigned char) \
2571 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2572 : ((TYPE) == tcc_binary \
2573 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2574 : ((TYPE) == tcc_constant \
2575 || (TYPE) == tcc_declaration \
2576 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2577 : ((SYM) == TRUTH_AND_EXPR \
2578 || (SYM) == TRUTH_OR_EXPR \
2579 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2580 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2581 : ((SYM) == COND_EXPR \
2582 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2583 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2584 || (SYM) == DOT_PROD_EXPR \
2585 || (SYM) == REALIGN_LOAD_EXPR \
2586 || (SYM) == VEC_COND_EXPR \
2587 || (SYM) == VEC_PERM_EXPR \
2588 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2589 : ((SYM) == CONSTRUCTOR \
2590 || (SYM) == OBJ_TYPE_REF \
2591 || (SYM) == ASSERT_EXPR \
2592 || (SYM) == ADDR_EXPR \
2593 || (SYM) == WITH_SIZE_EXPR \
2594 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2595 : GIMPLE_INVALID_RHS),
2596 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2597
2598 const unsigned char gimple_rhs_class_table[] = {
2599 #include "all-tree.def"
2600 };
2601
2602 #undef DEFTREECODE
2603 #undef END_OF_BASE_TREE_CODES
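
/* Illustrative sketch (not part of this file): what the table above
   yields for two codes.  PLUS_EXPR is tcc_binary, hence
   GIMPLE_BINARY_RHS with two RHS operands; SSA_NAME is listed
   explicitly as GIMPLE_SINGLE_RHS with one.  */

static void ATTRIBUTE_UNUSED
example_rhs_classes (void)
{
  gcc_assert (get_gimple_rhs_class (PLUS_EXPR) == GIMPLE_BINARY_RHS);
  gcc_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);
  gcc_assert (get_gimple_rhs_class (SSA_NAME) == GIMPLE_SINGLE_RHS);
  gcc_assert (get_gimple_rhs_num_ops (SSA_NAME) == 1);
}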
2604
2605 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2606
2607 /* Validation of GIMPLE expressions. */
2608
2609 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2610
2611 bool
2612 is_gimple_lvalue (tree t)
2613 {
2614 return (is_gimple_addressable (t)
2615 || TREE_CODE (t) == WITH_SIZE_EXPR
2616 /* These are complex lvalues, but don't have addresses, so they
2617 go here. */
2618 || TREE_CODE (t) == BIT_FIELD_REF);
2619 }
2620
2621 /* Return true if T is a GIMPLE condition. */
2622
2623 bool
2624 is_gimple_condexpr (tree t)
2625 {
2626 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2627 && !tree_could_throw_p (t)
2628 && is_gimple_val (TREE_OPERAND (t, 0))
2629 && is_gimple_val (TREE_OPERAND (t, 1))));
2630 }
2631
2632 /* Return true if T is something whose address can be taken. */
2633
2634 bool
2635 is_gimple_addressable (tree t)
2636 {
2637 return (is_gimple_id (t) || handled_component_p (t)
2638 || TREE_CODE (t) == MEM_REF);
2639 }
2640
2641 /* Return true if T is a valid gimple constant. */
2642
2643 bool
2644 is_gimple_constant (const_tree t)
2645 {
2646 switch (TREE_CODE (t))
2647 {
2648 case INTEGER_CST:
2649 case REAL_CST:
2650 case FIXED_CST:
2651 case STRING_CST:
2652 case COMPLEX_CST:
2653 case VECTOR_CST:
2654 return true;
2655
2656 default:
2657 return false;
2658 }
2659 }
2660
2661 /* Return true if T is a gimple address. */
2662
2663 bool
2664 is_gimple_address (const_tree t)
2665 {
2666 tree op;
2667
2668 if (TREE_CODE (t) != ADDR_EXPR)
2669 return false;
2670
2671 op = TREE_OPERAND (t, 0);
2672 while (handled_component_p (op))
2673 {
2674 if ((TREE_CODE (op) == ARRAY_REF
2675 || TREE_CODE (op) == ARRAY_RANGE_REF)
2676 && !is_gimple_val (TREE_OPERAND (op, 1)))
2677 return false;
2678
2679 op = TREE_OPERAND (op, 0);
2680 }
2681
2682 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2683 return true;
2684
2685 switch (TREE_CODE (op))
2686 {
2687 case PARM_DECL:
2688 case RESULT_DECL:
2689 case LABEL_DECL:
2690 case FUNCTION_DECL:
2691 case VAR_DECL:
2692 case CONST_DECL:
2693 return true;
2694
2695 default:
2696 return false;
2697 }
2698 }
2699
2700 /* Return true if T is a gimple invariant address. */
2701
2702 bool
2703 is_gimple_invariant_address (const_tree t)
2704 {
2705 const_tree op;
2706
2707 if (TREE_CODE (t) != ADDR_EXPR)
2708 return false;
2709
2710 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2711 if (!op)
2712 return false;
2713
2714 if (TREE_CODE (op) == MEM_REF)
2715 {
2716 const_tree op0 = TREE_OPERAND (op, 0);
2717 return (TREE_CODE (op0) == ADDR_EXPR
2718 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2719 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2720 }
2721
2722 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2723 }
2724
2725 /* Return true if T is a gimple invariant address at IPA level
2726 (so addresses of variables on stack are not allowed). */
2727
2728 bool
2729 is_gimple_ip_invariant_address (const_tree t)
2730 {
2731 const_tree op;
2732
2733 if (TREE_CODE (t) != ADDR_EXPR)
2734 return false;
2735
2736 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2737 if (!op)
2738 return false;
2739
2740 if (TREE_CODE (op) == MEM_REF)
2741 {
2742 const_tree op0 = TREE_OPERAND (op, 0);
2743 return (TREE_CODE (op0) == ADDR_EXPR
2744 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2745 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2746 }
2747
2748 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2749 }
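
/* Illustrative worked example (not part of this file) of how the three
   address predicates above nest, for a hypothetical global array G,
   stack variable L and non-constant index I:

     &G[2]  is a gimple address, an invariant address, and an IPA
            invariant address;
     &L     is a gimple address and an invariant address, but not an
            IPA invariant address, since stack addresses differ
            between frames;
     &G[I]  is a gimple address only (assuming I is a gimple val),
            since the address varies with I.  */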
2750
2751 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2752 form of function invariant. */
2753
2754 bool
2755 is_gimple_min_invariant (const_tree t)
2756 {
2757 if (TREE_CODE (t) == ADDR_EXPR)
2758 return is_gimple_invariant_address (t);
2759
2760 return is_gimple_constant (t);
2761 }
2762
2763 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2764 form of gimple minimal invariant. */
2765
2766 bool
2767 is_gimple_ip_invariant (const_tree t)
2768 {
2769 if (TREE_CODE (t) == ADDR_EXPR)
2770 return is_gimple_ip_invariant_address (t);
2771
2772 return is_gimple_constant (t);
2773 }
2774
2775 /* Return true if T is a variable. */
2776
2777 bool
2778 is_gimple_variable (tree t)
2779 {
2780 return (TREE_CODE (t) == VAR_DECL
2781 || TREE_CODE (t) == PARM_DECL
2782 || TREE_CODE (t) == RESULT_DECL
2783 || TREE_CODE (t) == SSA_NAME);
2784 }
2785
2786 /* Return true if T is a GIMPLE identifier (something with an address). */
2787
2788 bool
2789 is_gimple_id (tree t)
2790 {
2791 return (is_gimple_variable (t)
2792 || TREE_CODE (t) == FUNCTION_DECL
2793 || TREE_CODE (t) == LABEL_DECL
2794 || TREE_CODE (t) == CONST_DECL
2795 /* Allow string constants, since they are addressable. */
2796 || TREE_CODE (t) == STRING_CST);
2797 }
2798
2799 /* Return true if OP, an SSA name or a DECL, is a virtual operand. */
2800
2801 bool
2802 virtual_operand_p (tree op)
2803 {
2804 if (TREE_CODE (op) == SSA_NAME)
2805 {
2806 op = SSA_NAME_VAR (op);
2807 if (!op)
2808 return false;
2809 }
2810
2811 if (TREE_CODE (op) == VAR_DECL)
2812 return VAR_DECL_IS_VIRTUAL_OPERAND (op);
2813
2814 return false;
2815 }
2816
2817
2818 /* Return true if T is a non-aggregate register variable. */
2819
2820 bool
2821 is_gimple_reg (tree t)
2822 {
2823 if (virtual_operand_p (t))
2824 return false;
2825
2826 if (TREE_CODE (t) == SSA_NAME)
2827 return true;
2828
2829 if (!is_gimple_variable (t))
2830 return false;
2831
2832 if (!is_gimple_reg_type (TREE_TYPE (t)))
2833 return false;
2834
2835 /* A volatile decl is not acceptable because we can't reuse it as
2836 needed. We need to copy it into a temp first. */
2837 if (TREE_THIS_VOLATILE (t))
2838 return false;
2839
2840 /* We define "registers" as things that can be renamed as needed,
2841 which with our infrastructure does not apply to memory. */
2842 if (needs_to_live_in_memory (t))
2843 return false;
2844
2845 /* Hard register variables are an interesting case. For those that
2846 are call-clobbered, we don't know where all the calls are, since
2847 we don't (want to) take into account which operations will turn
2848 into libcalls at the rtl level. For those that are call-saved,
2849 we don't currently model the fact that calls may in fact change
2850 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2851 level, and so we miss variable changes they might imply. All around,
2852 it seems safest to not do too much optimization with these at the
2853 tree level at all. We'll have to rely on the rtl optimizers to
2854 clean this up, as there we've got all the appropriate bits exposed. */
2855 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2856 return false;
2857
2858 /* Complex and vector values must have been put into SSA-like form.
2859 That is, no assignments to the individual components. */
2860 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2861 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2862 return DECL_GIMPLE_REG_P (t);
2863
2864 return true;
2865 }
2866
2867
2868 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2869
2870 bool
2871 is_gimple_val (tree t)
2872 {
2873 /* Make loads from volatiles and memory vars explicit. */
2874 if (is_gimple_variable (t)
2875 && is_gimple_reg_type (TREE_TYPE (t))
2876 && !is_gimple_reg (t))
2877 return false;
2878
2879 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2880 }
2881
2882 /* Similarly, but accept hard registers as inputs to asm statements. */
2883
2884 bool
2885 is_gimple_asm_val (tree t)
2886 {
2887 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2888 return true;
2889
2890 return is_gimple_val (t);
2891 }
2892
2893 /* Return true if T is a GIMPLE minimal lvalue. */
2894
2895 bool
2896 is_gimple_min_lval (tree t)
2897 {
2898 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2899 return false;
2900 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2901 }
2902
2903 /* Return true if T is a valid function operand of a CALL_EXPR. */
2904
2905 bool
2906 is_gimple_call_addr (tree t)
2907 {
2908 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2909 }
2910
2911 /* Return true if T is a valid address operand of a MEM_REF. */
2912
2913 bool
2914 is_gimple_mem_ref_addr (tree t)
2915 {
2916 return (is_gimple_reg (t)
2917 || TREE_CODE (t) == INTEGER_CST
2918 || (TREE_CODE (t) == ADDR_EXPR
2919 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2920 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2921 }
2922
2923
2924 /* Given a memory reference expression T, return its base address.
2925 The base address of a memory reference expression is the main
2926 object being referenced. For instance, the base address for
2927 'array[i].fld[j]' is 'array'. You can think of this as stripping
2928 away the offset part from a memory address.
2929
2930 This function calls handled_component_p to strip away all the inner
2931 parts of the memory reference until it reaches the base object. */
2932
2933 tree
2934 get_base_address (tree t)
2935 {
2936 while (handled_component_p (t))
2937 t = TREE_OPERAND (t, 0);
2938
2939 if ((TREE_CODE (t) == MEM_REF
2940 || TREE_CODE (t) == TARGET_MEM_REF)
2941 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2942 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2943
2944 /* ??? Either the alias oracle or all callers need to properly deal
2945 with WITH_SIZE_EXPRs before we can look through those. */
2946 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2947 return NULL_TREE;
2948
2949 return t;
2950 }
2951
2952 void
2953 recalculate_side_effects (tree t)
2954 {
2955 enum tree_code code = TREE_CODE (t);
2956 int len = TREE_OPERAND_LENGTH (t);
2957 int i;
2958
2959 switch (TREE_CODE_CLASS (code))
2960 {
2961 case tcc_expression:
2962 switch (code)
2963 {
2964 case INIT_EXPR:
2965 case MODIFY_EXPR:
2966 case VA_ARG_EXPR:
2967 case PREDECREMENT_EXPR:
2968 case PREINCREMENT_EXPR:
2969 case POSTDECREMENT_EXPR:
2970 case POSTINCREMENT_EXPR:
2971 /* All of these have side-effects, no matter what their
2972 operands are. */
2973 return;
2974
2975 default:
2976 break;
2977 }
2978 /* Fall through. */
2979
2980 case tcc_comparison: /* a comparison expression */
2981 case tcc_unary: /* a unary arithmetic expression */
2982 case tcc_binary: /* a binary arithmetic expression */
2983 case tcc_reference: /* a reference */
2984 case tcc_vl_exp: /* a function call */
2985 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2986 for (i = 0; i < len; ++i)
2987 {
2988 tree op = TREE_OPERAND (t, i);
2989 if (op && TREE_SIDE_EFFECTS (op))
2990 TREE_SIDE_EFFECTS (t) = 1;
2991 }
2992 break;
2993
2994 case tcc_constant:
2995 /* No side-effects. */
2996 return;
2997
2998 default:
2999 gcc_unreachable ();
3000 }
3001 }
3002
3003 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3004 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3005 we failed to create one. */
3006
3007 tree
3008 canonicalize_cond_expr_cond (tree t)
3009 {
3010 /* Strip conversions around boolean operations. */
3011 if (CONVERT_EXPR_P (t)
3012 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3013 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3014 == BOOLEAN_TYPE))
3015 t = TREE_OPERAND (t, 0);
3016
3017 /* For !x use x == 0. */
3018 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3019 {
3020 tree top0 = TREE_OPERAND (t, 0);
3021 t = build2 (EQ_EXPR, TREE_TYPE (t),
3022 top0, build_int_cst (TREE_TYPE (top0), 0));
3023 }
3024 /* For cmp ? 1 : 0 use cmp. */
3025 else if (TREE_CODE (t) == COND_EXPR
3026 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3027 && integer_onep (TREE_OPERAND (t, 1))
3028 && integer_zerop (TREE_OPERAND (t, 2)))
3029 {
3030 tree top0 = TREE_OPERAND (t, 0);
3031 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3032 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3033 }
3034 /* For x ^ y use x != y. */
3035 else if (TREE_CODE (t) == BIT_XOR_EXPR)
3036 t = build2 (NE_EXPR, TREE_TYPE (t),
3037 TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
3038
3039 if (is_gimple_condexpr (t))
3040 return t;
3041
3042 return NULL_TREE;
3043 }
3044
3045 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3046 the positions marked by the set ARGS_TO_SKIP. */
3047
3048 gimple
3049 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3050 {
3051 int i;
3052 int nargs = gimple_call_num_args (stmt);
3053 vec<tree> vargs;
3054 vargs.create (nargs);
3055 gimple new_stmt;
3056
3057 for (i = 0; i < nargs; i++)
3058 if (!bitmap_bit_p (args_to_skip, i))
3059 vargs.quick_push (gimple_call_arg (stmt, i));
3060
3061 if (gimple_call_internal_p (stmt))
3062 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3063 vargs);
3064 else
3065 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3066 vargs.release ();
3067 if (gimple_call_lhs (stmt))
3068 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3069
3070 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3071 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3072
3073 if (gimple_has_location (stmt))
3074 gimple_set_location (new_stmt, gimple_location (stmt));
3075 gimple_call_copy_flags (new_stmt, stmt);
3076 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3077
3078 gimple_set_modified (new_stmt, true);
3079
3080 return new_stmt;
3081 }
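
/* Illustrative sketch (not part of this file): dropping the second
   argument of the call at GSI.  Bit I in the bitmap marks argument I
   for removal, and the new call replaces the old one in place.  */

static void ATTRIBUTE_UNUSED
example_drop_second_arg (gimple_stmt_iterator *gsi)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (args_to_skip, 1);
  new_call = gimple_call_copy_skip_args (gsi_stmt (*gsi), args_to_skip);
  gsi_replace (gsi, new_call, false);
  BITMAP_FREE (args_to_skip);
}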
3082
3083
3084
3085 /* Return true if the field decls F1 and F2 are at the same offset.
3086
3087 This is intended to be used on GIMPLE types only. */
3088
3089 bool
3090 gimple_compare_field_offset (tree f1, tree f2)
3091 {
3092 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3093 {
3094 tree offset1 = DECL_FIELD_OFFSET (f1);
3095 tree offset2 = DECL_FIELD_OFFSET (f2);
3096 return ((offset1 == offset2
3097 /* Once gimplification is done, self-referential offsets are
3098 instantiated as operand #2 of the COMPONENT_REF built for
3099 each access and reset. Therefore, they are not relevant
3100 anymore and fields are interchangeable provided that they
3101 represent the same access. */
3102 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3103 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3104 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3105 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3106 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3107 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3108 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3109 || operand_equal_p (offset1, offset2, 0))
3110 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3111 DECL_FIELD_BIT_OFFSET (f2)));
3112 }
3113
3114 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3115 should be, so handle differing ones specially by decomposing
3116 the offset into a byte and bit offset manually. */
3117 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3118 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3119 {
3120 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3121 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3122 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3123 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3124 + bit_offset1 / BITS_PER_UNIT);
3125 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3126 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3127 + bit_offset2 / BITS_PER_UNIT);
3128 if (byte_offset1 != byte_offset2)
3129 return false;
3130 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3131 }
3132
3133 return false;
3134 }
3135
3136
3137 /* Return a type the same as TYPE except unsigned or
3138 signed according to UNSIGNEDP. */
3139
3140 static tree
3141 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
3142 {
3143 tree type1;
3144
3145 type1 = TYPE_MAIN_VARIANT (type);
3146 if (type1 == signed_char_type_node
3147 || type1 == char_type_node
3148 || type1 == unsigned_char_type_node)
3149 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3150 if (type1 == integer_type_node || type1 == unsigned_type_node)
3151 return unsignedp ? unsigned_type_node : integer_type_node;
3152 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
3153 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3154 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
3155 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3156 if (type1 == long_long_integer_type_node
3157 || type1 == long_long_unsigned_type_node)
3158 return unsignedp
3159 ? long_long_unsigned_type_node
3160 : long_long_integer_type_node;
3161 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
3162 return unsignedp
3163 ? int128_unsigned_type_node
3164 : int128_integer_type_node;
3165 #if HOST_BITS_PER_WIDE_INT >= 64
3166 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
3167 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3168 #endif
3169 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
3170 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3171 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
3172 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3173 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
3174 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3175 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
3176 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3177
3178 #define GIMPLE_FIXED_TYPES(NAME) \
3179 if (type1 == short_ ## NAME ## _type_node \
3180 || type1 == unsigned_short_ ## NAME ## _type_node) \
3181 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
3182 : short_ ## NAME ## _type_node; \
3183 if (type1 == NAME ## _type_node \
3184 || type1 == unsigned_ ## NAME ## _type_node) \
3185 return unsignedp ? unsigned_ ## NAME ## _type_node \
3186 : NAME ## _type_node; \
3187 if (type1 == long_ ## NAME ## _type_node \
3188 || type1 == unsigned_long_ ## NAME ## _type_node) \
3189 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
3190 : long_ ## NAME ## _type_node; \
3191 if (type1 == long_long_ ## NAME ## _type_node \
3192 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
3193 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
3194 : long_long_ ## NAME ## _type_node;
3195
3196 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
3197 if (type1 == NAME ## _type_node \
3198 || type1 == u ## NAME ## _type_node) \
3199 return unsignedp ? u ## NAME ## _type_node \
3200 : NAME ## _type_node;
3201
3202 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
3203 if (type1 == sat_ ## short_ ## NAME ## _type_node \
3204 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
3205 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
3206 : sat_ ## short_ ## NAME ## _type_node; \
3207 if (type1 == sat_ ## NAME ## _type_node \
3208 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
3209 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
3210 : sat_ ## NAME ## _type_node; \
3211 if (type1 == sat_ ## long_ ## NAME ## _type_node \
3212 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
3213 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
3214 : sat_ ## long_ ## NAME ## _type_node; \
3215 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
3216 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
3217 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
3218 : sat_ ## long_long_ ## NAME ## _type_node;
3219
3220 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
3221 if (type1 == sat_ ## NAME ## _type_node \
3222 || type1 == sat_ ## u ## NAME ## _type_node) \
3223 return unsignedp ? sat_ ## u ## NAME ## _type_node \
3224 : sat_ ## NAME ## _type_node;
3225
3226 GIMPLE_FIXED_TYPES (fract);
3227 GIMPLE_FIXED_TYPES_SAT (fract);
3228 GIMPLE_FIXED_TYPES (accum);
3229 GIMPLE_FIXED_TYPES_SAT (accum);
3230
3231 GIMPLE_FIXED_MODE_TYPES (qq);
3232 GIMPLE_FIXED_MODE_TYPES (hq);
3233 GIMPLE_FIXED_MODE_TYPES (sq);
3234 GIMPLE_FIXED_MODE_TYPES (dq);
3235 GIMPLE_FIXED_MODE_TYPES (tq);
3236 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
3237 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
3238 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
3239 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
3240 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
3241 GIMPLE_FIXED_MODE_TYPES (ha);
3242 GIMPLE_FIXED_MODE_TYPES (sa);
3243 GIMPLE_FIXED_MODE_TYPES (da);
3244 GIMPLE_FIXED_MODE_TYPES (ta);
3245 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
3246 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
3247 GIMPLE_FIXED_MODE_TYPES_SAT (da);
3248 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
3249
3250 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
3251 the precision; they have precision set to match their range, but
3252 may use a wider mode to match an ABI. If we change modes, we may
3253 wind up with bad conversions. For INTEGER_TYPEs in C, must check
3254 the precision as well, so as to yield correct results for
3255 bit-field types. C++ does not have these separate bit-field
3256 types, and producing a signed or unsigned variant of an
3257 ENUMERAL_TYPE may cause other problems as well. */
3258 if (!INTEGRAL_TYPE_P (type)
3259 || TYPE_UNSIGNED (type) == unsignedp)
3260 return type;
3261
3262 #define TYPE_OK(node) \
3263 (TYPE_MODE (type) == TYPE_MODE (node) \
3264 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
3265 if (TYPE_OK (signed_char_type_node))
3266 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3267 if (TYPE_OK (integer_type_node))
3268 return unsignedp ? unsigned_type_node : integer_type_node;
3269 if (TYPE_OK (short_integer_type_node))
3270 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3271 if (TYPE_OK (long_integer_type_node))
3272 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3273 if (TYPE_OK (long_long_integer_type_node))
3274 return (unsignedp
3275 ? long_long_unsigned_type_node
3276 : long_long_integer_type_node);
3277 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
3278 return (unsignedp
3279 ? int128_unsigned_type_node
3280 : int128_integer_type_node);
3281
3282 #if HOST_BITS_PER_WIDE_INT >= 64
3283 if (TYPE_OK (intTI_type_node))
3284 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3285 #endif
3286 if (TYPE_OK (intDI_type_node))
3287 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3288 if (TYPE_OK (intSI_type_node))
3289 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3290 if (TYPE_OK (intHI_type_node))
3291 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3292 if (TYPE_OK (intQI_type_node))
3293 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3294
3295 #undef GIMPLE_FIXED_TYPES
3296 #undef GIMPLE_FIXED_MODE_TYPES
3297 #undef GIMPLE_FIXED_TYPES_SAT
3298 #undef GIMPLE_FIXED_MODE_TYPES_SAT
3299 #undef TYPE_OK
3300
3301 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
3302 }
3303
3304
3305 /* Return an unsigned type the same as TYPE in other respects. */
3306
3307 tree
3308 gimple_unsigned_type (tree type)
3309 {
3310 return gimple_signed_or_unsigned_type (true, type);
3311 }
3312
3313
3314 /* Return a signed type the same as TYPE in other respects. */
3315
3316 tree
3317 gimple_signed_type (tree type)
3318 {
3319 return gimple_signed_or_unsigned_type (false, type);
3320 }
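
/* Illustrative worked example (not part of this file): for standard
   types the matching variant node is returned directly, e.g.

     gimple_unsigned_type (integer_type_node) == unsigned_type_node
     gimple_signed_type (unsigned_char_type_node) == signed_char_type_node

   while an integral type with no listed counterpart falls through to
   build_nonstandard_integer_type; non-integral types are returned
   unchanged.  */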
3321
3322
3323 /* Return the type-based alias set for T, which may be an expression
3324 or a type. Return -1 if we don't do anything special. */
3325
3326 alias_set_type
3327 gimple_get_alias_set (tree t)
3328 {
3329 tree u;
3330
3331 /* Permit type-punning when accessing a union, provided the access
3332 is directly through the union. For example, this code does not
3333 permit taking the address of a union member and then storing
3334 through it. Even the type-punning allowed here is a GCC
3335 extension, albeit a common and useful one; the C standard says
3336 that such accesses have implementation-defined behavior. */
3337 for (u = t;
3338 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3339 u = TREE_OPERAND (u, 0))
3340 if (TREE_CODE (u) == COMPONENT_REF
3341 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3342 return 0;
3343
3344 /* That's all the expressions we handle specially. */
3345 if (!TYPE_P (t))
3346 return -1;
3347
3348 /* For convenience, follow the C standard when dealing with
3349 character types. Any object may be accessed via an lvalue that
3350 has character type. */
3351 if (t == char_type_node
3352 || t == signed_char_type_node
3353 || t == unsigned_char_type_node)
3354 return 0;
3355
3356 /* Allow aliasing between signed and unsigned variants of the same
3357 type. We treat the signed variant as canonical. */
3358 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
3359 {
3360 tree t1 = gimple_signed_type (t);
3361
3362 /* t1 == t can happen for boolean nodes which are always unsigned. */
3363 if (t1 != t)
3364 return get_alias_set (t1);
3365 }
3366
3367 return -1;
3368 }
3369
3370
3371 /* From a tree operand OP return the base of a load or store operation
3372 or NULL_TREE if OP is not a load or a store. */
3373
3374 static tree
3375 get_base_loadstore (tree op)
3376 {
3377 while (handled_component_p (op))
3378 op = TREE_OPERAND (op, 0);
3379 if (DECL_P (op)
3380 || INDIRECT_REF_P (op)
3381 || TREE_CODE (op) == MEM_REF
3382 || TREE_CODE (op) == TARGET_MEM_REF)
3383 return op;
3384 return NULL_TREE;
3385 }
3386
3387 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3388 VISIT_ADDR, if non-NULL, on its load, store and address-taken operands,
3389 passing STMT, the base of the operand and DATA to each. The base
3390 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3391 or the argument of an address expression.
3392 Returns the results of these callbacks or'ed together. */
3393
3394 bool
3395 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3396 bool (*visit_load)(gimple, tree, void *),
3397 bool (*visit_store)(gimple, tree, void *),
3398 bool (*visit_addr)(gimple, tree, void *))
3399 {
3400 bool ret = false;
3401 unsigned i;
3402 if (gimple_assign_single_p (stmt))
3403 {
3404 tree lhs, rhs;
3405 if (visit_store)
3406 {
3407 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3408 if (lhs)
3409 ret |= visit_store (stmt, lhs, data);
3410 }
3411 rhs = gimple_assign_rhs1 (stmt);
3412 while (handled_component_p (rhs))
3413 rhs = TREE_OPERAND (rhs, 0);
3414 if (visit_addr)
3415 {
3416 if (TREE_CODE (rhs) == ADDR_EXPR)
3417 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3418 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3419 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3420 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3421 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3422 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3423 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3424 0), data);
3425 else if (TREE_CODE (rhs) == CONSTRUCTOR)
3426 {
3427 unsigned int ix;
3428 tree val;
3429
3430 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
3431 if (TREE_CODE (val) == ADDR_EXPR)
3432 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
3433 else if (TREE_CODE (val) == OBJ_TYPE_REF
3434 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
3435 ret |= visit_addr (stmt,
3436 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
3437 0), data);
3438 }
3439 lhs = gimple_assign_lhs (stmt);
3440 if (TREE_CODE (lhs) == TARGET_MEM_REF
3441 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3442 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3443 }
3444 if (visit_load)
3445 {
3446 rhs = get_base_loadstore (rhs);
3447 if (rhs)
3448 ret |= visit_load (stmt, rhs, data);
3449 }
3450 }
3451 else if (visit_addr
3452 && (is_gimple_assign (stmt)
3453 || gimple_code (stmt) == GIMPLE_COND))
3454 {
3455 for (i = 0; i < gimple_num_ops (stmt); ++i)
3456 {
3457 tree op = gimple_op (stmt, i);
3458 if (op == NULL_TREE)
3459 ;
3460 else if (TREE_CODE (op) == ADDR_EXPR)
3461 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3462 /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a
3463 comparison tree with two operands. */
3464 else if (i == 1 && COMPARISON_CLASS_P (op))
3465 {
3466 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
3467 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
3468 0), data);
3469 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
3470 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
3471 0), data);
3472 }
3473 }
3474 }
3475 else if (is_gimple_call (stmt))
3476 {
3477 if (visit_store)
3478 {
3479 tree lhs = gimple_call_lhs (stmt);
3480 if (lhs)
3481 {
3482 lhs = get_base_loadstore (lhs);
3483 if (lhs)
3484 ret |= visit_store (stmt, lhs, data);
3485 }
3486 }
3487 if (visit_load || visit_addr)
3488 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3489 {
3490 tree rhs = gimple_call_arg (stmt, i);
3491 if (visit_addr
3492 && TREE_CODE (rhs) == ADDR_EXPR)
3493 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3494 else if (visit_load)
3495 {
3496 rhs = get_base_loadstore (rhs);
3497 if (rhs)
3498 ret |= visit_load (stmt, rhs, data);
3499 }
3500 }
3501 if (visit_addr
3502 && gimple_call_chain (stmt)
3503 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3504 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3505 data);
3506 if (visit_addr
3507 && gimple_call_return_slot_opt_p (stmt)
3508 && gimple_call_lhs (stmt) != NULL_TREE
3509 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3510 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3511 }
3512 else if (gimple_code (stmt) == GIMPLE_ASM)
3513 {
3514 unsigned noutputs;
3515 const char *constraint;
3516 const char **oconstraints;
3517 bool allows_mem, allows_reg, is_inout;
3518 noutputs = gimple_asm_noutputs (stmt);
3519 oconstraints = XALLOCAVEC (const char *, noutputs);
3520 if (visit_store || visit_addr)
3521 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3522 {
3523 tree link = gimple_asm_output_op (stmt, i);
3524 tree op = get_base_loadstore (TREE_VALUE (link));
3525 if (op && visit_store)
3526 ret |= visit_store (stmt, op, data);
3527 if (visit_addr)
3528 {
3529 constraint = TREE_STRING_POINTER
3530 (TREE_VALUE (TREE_PURPOSE (link)));
3531 oconstraints[i] = constraint;
3532 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3533 &allows_reg, &is_inout);
3534 if (op && !allows_reg && allows_mem)
3535 ret |= visit_addr (stmt, op, data);
3536 }
3537 }
3538 if (visit_load || visit_addr)
3539 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3540 {
3541 tree link = gimple_asm_input_op (stmt, i);
3542 tree op = TREE_VALUE (link);
3543 if (visit_addr
3544 && TREE_CODE (op) == ADDR_EXPR)
3545 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3546 else if (visit_load || visit_addr)
3547 {
3548 op = get_base_loadstore (op);
3549 if (op)
3550 {
3551 if (visit_load)
3552 ret |= visit_load (stmt, op, data);
3553 if (visit_addr)
3554 {
3555 constraint = TREE_STRING_POINTER
3556 (TREE_VALUE (TREE_PURPOSE (link)));
3557 parse_input_constraint (&constraint, 0, 0, noutputs,
3558 0, oconstraints,
3559 &allows_mem, &allows_reg);
3560 if (!allows_reg && allows_mem)
3561 ret |= visit_addr (stmt, op, data);
3562 }
3563 }
3564 }
3565 }
3566 }
3567 else if (gimple_code (stmt) == GIMPLE_RETURN)
3568 {
3569 tree op = gimple_return_retval (stmt);
3570 if (op)
3571 {
3572 if (visit_addr
3573 && TREE_CODE (op) == ADDR_EXPR)
3574 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3575 else if (visit_load)
3576 {
3577 op = get_base_loadstore (op);
3578 if (op)
3579 ret |= visit_load (stmt, op, data);
3580 }
3581 }
3582 }
3583 else if (visit_addr
3584 && gimple_code (stmt) == GIMPLE_PHI)
3585 {
3586 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
3587 {
3588 tree op = gimple_phi_arg_def (stmt, i);
3589 if (TREE_CODE (op) == ADDR_EXPR)
3590 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3591 }
3592 }
3593 else if (visit_addr
3594 && gimple_code (stmt) == GIMPLE_GOTO)
3595 {
3596 tree op = gimple_goto_dest (stmt);
3597 if (TREE_CODE (op) == ADDR_EXPR)
3598 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3599 }
3600
3601 return ret;
3602 }
3603
3604 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3605 should make a faster clone for this case. */
3606
3607 bool
3608 walk_stmt_load_store_ops (gimple stmt, void *data,
3609 bool (*visit_load)(gimple, tree, void *),
3610 bool (*visit_store)(gimple, tree, void *))
3611 {
3612 return walk_stmt_load_store_addr_ops (stmt, data,
3613 visit_load, visit_store, NULL);
3614 }
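
/* Illustrative sketch (not part of this file): a minimal visitor that
   counts the load bases in a statement.  Returning false means this
   callback has nothing to report, so the or'ed result stays false.

   Used as: unsigned n = 0;
            walk_stmt_load_store_ops (stmt, &n, example_count_load, NULL);  */

static bool ATTRIBUTE_UNUSED
example_count_load (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
                    void *data)
{
  ++*(unsigned *) data;
  return false;
}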
3615
3616 /* Helper for gimple_ior_addresses_taken_1. */
3617
3618 static bool
3619 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
3620 tree addr, void *data)
3621 {
3622 bitmap addresses_taken = (bitmap)data;
3623 addr = get_base_address (addr);
3624 if (addr
3625 && DECL_P (addr))
3626 {
3627 bitmap_set_bit (addresses_taken, DECL_UID (addr));
3628 return true;
3629 }
3630 return false;
3631 }
3632
3633 /* Set the bit for the UID of every decl that has its address taken
3634 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3635 were any in this stmt. */
3636
3637 bool
3638 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
3639 {
3640 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
3641 gimple_ior_addresses_taken_1);
3642 }
3643
3644
3645 /* Return a printable name for symbol DECL. */
3646
3647 const char *
3648 gimple_decl_printable_name (tree decl, int verbosity)
3649 {
3650 if (!DECL_NAME (decl))
3651 return NULL;
3652
3653 if (DECL_ASSEMBLER_NAME_SET_P (decl))
3654 {
3655 const char *str, *mangled_str;
3656 int dmgl_opts = DMGL_NO_OPTS;
3657
3658 if (verbosity >= 2)
3659 {
3660 dmgl_opts = DMGL_VERBOSE
3661 | DMGL_ANSI
3662 | DMGL_GNU_V3
3663 | DMGL_RET_POSTFIX;
3664 if (TREE_CODE (decl) == FUNCTION_DECL)
3665 dmgl_opts |= DMGL_PARAMS;
3666 }
3667
3668 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3669 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
3670 return (str) ? str : mangled_str;
3671 }
3672
3673 return IDENTIFIER_POINTER (DECL_NAME (decl));
3674 }
3675
3676 /* Return TRUE iff STMT is a call to a normal built-in function. */
3677
3678 bool
3679 is_gimple_builtin_call (gimple stmt)
3680 {
3681 tree callee;
3682
3683 if (is_gimple_call (stmt)
3684 && (callee = gimple_call_fndecl (stmt))
3685 && is_builtin_fn (callee)
3686 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
3687 return true;
3688
3689 return false;
3690 }
3691
3692 /* Return true when STMT's arguments match those of FNDECL. */
3693
3694 static bool
3695 validate_call (gimple stmt, tree fndecl)
3696 {
3697 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3698 unsigned nargs = gimple_call_num_args (stmt);
3699 for (unsigned i = 0; i < nargs; ++i)
3700 {
3701 /* Variadic args follow. */
3702 if (!targs)
3703 return true;
3704 tree arg = gimple_call_arg (stmt, i);
3705 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
3706 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
3707 ;
3708 else if (POINTER_TYPE_P (TREE_TYPE (arg))
3709 && POINTER_TYPE_P (TREE_VALUE (targs)))
3710 ;
3711 else if (TREE_CODE (TREE_TYPE (arg))
3712 != TREE_CODE (TREE_VALUE (targs)))
3713 return false;
3714 targs = TREE_CHAIN (targs);
3715 }
3716 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
3717 return false;
3718 return true;
3719 }
3720
3721 /* Return true when STMT is a call to a built-in function of class KLASS. */
3722
3723 bool
3724 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
3725 {
3726 tree fndecl;
3727 if (is_gimple_call (stmt)
3728 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
3729 && DECL_BUILT_IN_CLASS (fndecl) == klass)
3730 return validate_call (stmt, fndecl);
3731 return false;
3732 }
3733
3734 /* Return true when STMT is a call to the normal built-in function with code CODE. */
3735
3736 bool
3737 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
3738 {
3739 tree fndecl;
3740 if (is_gimple_call (stmt)
3741 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
3742 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3743 && DECL_FUNCTION_CODE (fndecl) == code)
3744 return validate_call (stmt, fndecl);
3745 return false;
3746 }
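
/* Illustrative sketch (not part of this file): recognizing a memcpy
   call; gimple_call_builtin_p also runs the argument validation done
   by validate_call above.  */

static bool ATTRIBUTE_UNUSED
example_is_memcpy_call_p (gimple stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}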
3747
3748 /* Return true if STMT clobbers memory. STMT is required to be a
3749 GIMPLE_ASM. */
3750
3751 bool
3752 gimple_asm_clobbers_memory_p (const_gimple stmt)
3753 {
3754 unsigned i;
3755
3756 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
3757 {
3758 tree op = gimple_asm_clobber_op (stmt, i);
3759 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
3760 return true;
3761 }
3762
3763 return false;
3764 }
3765
3766
3767 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
3768 useless type conversion, otherwise return false.
3769
3770 This function implicitly defines the middle-end type system. With
3771 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
3772 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
3773 the following invariants shall be fulfilled:
3774
3775 1) useless_type_conversion_p is transitive.
3776 If a < b and b < c then a < c.
3777
3778 2) useless_type_conversion_p is not symmetric.
3779 a < b does not imply a > b.
3780
3781 3) Types define the available set of operations applicable to values.
3782 A type conversion is useless if the operations for the target type
3783 are a subset of the operations for the source type. For example
3784 casts to void* are useless, casts from void* are not (void* can't
3785 be dereferenced or offsetted, but copied, hence its set of operations
3786 is a strict subset of that of all other data pointer types). Casts
3787 to const T* are useless (can't be written to), casts from const T*
3788 to T* are not. */
3789
3790 bool
3791 useless_type_conversion_p (tree outer_type, tree inner_type)
3792 {
3793 /* Do the following before stripping toplevel qualifiers. */
3794 if (POINTER_TYPE_P (inner_type)
3795 && POINTER_TYPE_P (outer_type))
3796 {
3797 /* Do not lose casts between pointers to different address spaces. */
3798 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
3799 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
3800 return false;
3801 }
3802
3803 /* From now on qualifiers on value types do not matter. */
3804 inner_type = TYPE_MAIN_VARIANT (inner_type);
3805 outer_type = TYPE_MAIN_VARIANT (outer_type);
3806
3807 if (inner_type == outer_type)
3808 return true;
3809
3810 /* If we know the canonical types, compare them. */
3811 if (TYPE_CANONICAL (inner_type)
3812 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
3813 return true;
3814
3815 /* Changes in machine mode are never useless conversions unless we
3816 deal with aggregate types in which case we defer to later checks. */
3817 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
3818 && !AGGREGATE_TYPE_P (inner_type))
3819 return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if the types do not
         have precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
           != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
          && TYPE_PRECISION (outer_type) != 1)
        return false;

      /* We don't need to preserve changes in the types' minimum or
         maximum values in general as these do not generate code
         unless the types' precisions differ.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
           && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
           || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
          && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
               || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
        return false;

      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
           && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
        return false;

      /* Conversions from array types with unknown extent to
         array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type))
        return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size, or between array types of
         different constant size.  */
      if (TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
          && (!TYPE_SIZE (inner_type)
              || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
              || !tree_int_cst_equal (TYPE_SIZE (outer_type),
                                      TYPE_SIZE (inner_type))))
        return false;

      /* Check conversions between arrays with partially known extents.
         If the array min/max values are constant they have to match.
         Otherwise allow conversions to unknown and variable extents.
         In particular this declares conversions that may change the
         mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type)
          && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
        {
          tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
          tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

          /* After gimplification a variable min/max value carries no
             additional information compared to a NULL value.  All that
             matters has been lowered to be part of the IL.  */
          if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
            inner_min = NULL_TREE;
          if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
            outer_min = NULL_TREE;
          if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
            inner_max = NULL_TREE;
          if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
            outer_max = NULL_TREE;

          /* Conversions NULL / variable <- cst are useless, but not
             the other way around.  */
          if (outer_min
              && (!inner_min
                  || !tree_int_cst_equal (inner_min, outer_min)))
            return false;
          if (outer_max
              && (!inner_max
                  || !tree_int_cst_equal (inner_max, outer_max)))
            return false;
        }

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
            || TREE_CODE (inner_type) == METHOD_TYPE)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type)))
        return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
          && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
                                         TYPE_METHOD_BASETYPE (inner_type)))
        return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
        return true;

      /* If the unqualified argument types are compatible the conversion
         is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
        return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
           inner_parm = TYPE_ARG_TYPES (inner_type);
           outer_parm && inner_parm;
           outer_parm = TREE_CHAIN (outer_parm),
           inner_parm = TREE_CHAIN (inner_parm))
        if (!useless_type_conversion_p
              (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
               TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
          return false;

      /* If there is a mismatch in the number of arguments the functions
         are not compatible.  */
      if (outer_parm || inner_parm)
        return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
        return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types that would have to be structurally
     compared.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return false;

  return false;
}
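
/* Illustrative sketch (not part of the original file): a few concrete
   pairs and what the predicate above says about them.  The type nodes
   used (integer_type_node, unsigned_type_node, char_type_node,
   ptr_type_node) are the middle-end's standard nodes; the helper
   itself is hypothetical.  */
#if 0
static void
useless_type_conversion_examples (void)
{
  /* A signedness change must be preserved in either direction.  */
  gcc_assert (!useless_type_conversion_p (unsigned_type_node,
                                          integer_type_node));

  /* In this implementation data pointers in the same address space are
     interchangeable, so the conversion is useless both ways...  */
  tree char_ptr = build_pointer_type (char_type_node);
  gcc_assert (useless_type_conversion_p (ptr_type_node, char_ptr));
  gcc_assert (useless_type_conversion_p (char_ptr, ptr_type_node));

  /* ...but a cast from a data pointer to a function pointer is not.  */
  tree fn_ptr
    = build_pointer_type (build_function_type_list (void_type_node,
                                                    NULL_TREE));
  gcc_assert (!useless_type_conversion_p (fn_ptr, char_ptr));
}
#endif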

/* Return true if a conversion from either type of TYPE1 and TYPE2
   to the other is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
          || (useless_type_conversion_p (type1, type2)
              && useless_type_conversion_p (type2, type1)));
}
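
/* Illustrative sketch (not part of the original file): since
   types_compatible_p requires a useless conversion in both
   directions, a pair that converts uselessly one way only is
   still incompatible.  */
#if 0
static void
types_compatible_examples (void)
{
  /* Identical types are trivially compatible.  */
  gcc_assert (types_compatible_p (integer_type_node, integer_type_node));

  /* int and unsigned int differ in signedness, so neither conversion
     is useless and the pair is incompatible.  */
  gcc_assert (!types_compatible_p (integer_type_node, unsigned_type_node));
}
#endif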

/* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE.  */

void
dump_decl_set (FILE *file, bitmap set)
{
  if (set)
    {
      bitmap_iterator bi;
      unsigned i;

      fprintf (file, "{ ");

      EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
        {
          fprintf (file, "D.%u", i);
          fprintf (file, " ");
        }

      fprintf (file, "}");
    }
  else
    fprintf (file, "NIL");
}
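
/* Illustrative sketch (hypothetical helper, not part of the original
   file): dumping a set prints the member indices in "D.<uid>" form,
   e.g. "{ D.1024 D.1027 }", and "NIL" for a NULL set.  */
#if 0
static void
debug_decl_set (bitmap set)
{
  dump_decl_set (stderr, set);
  fprintf (stderr, "\n");
}
#endif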

/* Given SSA_NAMEs NAME1 and NAME2, return true if they are candidates for
   coalescing together, false otherwise.

   This must stay consistent with var_map_base_init in tree-ssa-live.c.  */

bool
gimple_can_coalesce_p (tree name1, tree name2)
{
  /* First check the SSA_NAME's associated DECL.  We only want to
     coalesce if they have the same DECL or both have no associated DECL.  */
  tree var1 = SSA_NAME_VAR (name1);
  tree var2 = SSA_NAME_VAR (name2);
  var1 = (var1 && (!VAR_P (var1) || !DECL_IGNORED_P (var1))) ? var1 : NULL_TREE;
  var2 = (var2 && (!VAR_P (var2) || !DECL_IGNORED_P (var2))) ? var2 : NULL_TREE;
  if (var1 != var2)
    return false;

  /* Now check the types.  If the types are the same, then we should
     try to coalesce V1 and V2.  */
  tree t1 = TREE_TYPE (name1);
  tree t2 = TREE_TYPE (name2);
  if (t1 == t2)
    return true;

  /* If the types are not the same, check for a canonical type match.  This
     (for example) allows coalescing when the types are fundamentally the
     same, but just have different names.

     Note pointer types with different address spaces may have the same
     canonical type.  Those are rejected for coalescing by the
     types_compatible_p check.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2)
      && types_compatible_p (t1, t2))
    return true;

  return false;
}
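
/* Illustrative sketch (hypothetical, not part of the original file):
   an out-of-SSA style client would ask this question for a PHI result
   and one of its arguments before placing them in one partition.  */
#if 0
static bool
phi_arg_coalescable_p (gimple phi, unsigned i)
{
  tree res = gimple_phi_result (phi);
  tree arg = gimple_phi_arg_def (phi, i);
  return TREE_CODE (arg) == SSA_NAME && gimple_can_coalesce_p (res, arg);
}
#endif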

/* Return true when CALL is a call stmt that definitely doesn't
   free any memory or make it unavailable in any other way.  */

bool
nonfreeing_call_p (gimple call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
      && gimple_call_flags (call) & ECF_LEAF)
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
        /* Just in case these become ECF_LEAF in the future.  */
        case BUILT_IN_FREE:
        case BUILT_IN_TM_FREE:
        case BUILT_IN_REALLOC:
        case BUILT_IN_STACK_RESTORE:
          return false;
        default:
          return true;
      }

  return false;
}
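
/* Illustrative sketch (hypothetical, not part of the original file):
   a pass tracking whether allocated memory stays valid across a
   statement could use nonfreeing_call_p as a cheap filter.  */
#if 0
static bool
call_may_free_memory_p (gimple stmt)
{
  return is_gimple_call (stmt) && !nonfreeing_call_p (stmt);
}
#endif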

/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);

  return copy;
}
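
/* Illustrative sketch (hypothetical, not part of the original file):
   building a same-typed copy of VAR under a fresh name, in the style
   of the OMP-lowering clients of copy_var_decl; create_tmp_var_name
   is the usual way to obtain such a name.  */
#if 0
static tree
make_var_copy (tree var)
{
  return copy_var_decl (var, create_tmp_var_name ("copy"),
                        TREE_TYPE (var));
}
#endif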