Factor unrelated declarations out of tree.h.
[gcc.git] / gcc / gimple.c
1 /* Gimple IR support functions.
2
3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "calls.h"
29 #include "stmt.h"
30 #include "stor-layout.h"
31 #include "ggc.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "gimple.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "diagnostic.h"
40 #include "value-prof.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "demangle.h"
44 #include "langhooks.h"
45 #include "bitmap.h"
46
47
48 /* All the tuples have their operand vector (if present) at the very bottom
49 of the structure. Therefore, the offset required to find the
50 operands vector the size of the structure minus the size of the 1
51 element tree array at the end (see gimple_ops). */
52 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
53 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
54 EXPORTED_CONST size_t gimple_ops_offset_[] = {
55 #include "gsstruct.def"
56 };
57 #undef DEFGSSTRUCT
58
59 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
60 static const size_t gsstruct_code_size[] = {
61 #include "gsstruct.def"
62 };
63 #undef DEFGSSTRUCT
64
65 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
66 const char *const gimple_code_name[] = {
67 #include "gimple.def"
68 };
69 #undef DEFGSCODE
70
71 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
72 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
73 #include "gimple.def"
74 };
75 #undef DEFGSCODE
76
77 /* Gimple stats. */
78
79 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
80 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
81
82 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
83 static const char * const gimple_alloc_kind_names[] = {
84 "assignments",
85 "phi nodes",
86 "conditionals",
87 "everything else"
88 };
89
90 /* Gimple tuple constructors.
91 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
92 be passed a NULL to start with an empty sequence. */
93
/* Set the code for statement G to CODE.

   Low-level setter: it writes the code field directly and performs no
   validation, so the caller must ensure the tuple's structure actually
   matches CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}
101
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  This is the size of the tuple structure only; it does
   not include space for any operands beyond the first (see
   gimple_alloc_stat).  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}
110
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  The returned statement is zero-initialized (GC-cleared),
   marked modified, and set up as a singleton sequence node.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  /* The tuple structure already contains room for one trailing operand
     (see the gimple_ops_offset_ comment above), so only NUM_OPS - 1
     extra slots need to be added.  */
  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
142
/* Set SUBCODE to be the code of the expression computed by statement G.
   The subcode field is a 16-bit bitfield, hence the range check.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}
153
154
155
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  /* Allocate the tuple, then record SUBCODE in its 16-bit subcode
     field.  */
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
172
173
174 /* Build a GIMPLE_RETURN statement returning RETVAL. */
175
176 gimple
177 gimple_build_return (tree retval)
178 {
179 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
180 if (retval)
181 gimple_return_set_retval (s, retval);
182 return s;
183 }
184
185 /* Reset alias information on call S. */
186
187 void
188 gimple_call_reset_alias_info (gimple s)
189 {
190 if (gimple_call_flags (s) & ECF_CONST)
191 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
192 else
193 pt_solution_reset (gimple_call_use_set (s));
194 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
195 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
196 else
197 pt_solution_reset (gimple_call_clobber_set (s));
198 }
199
200 /* Helper for gimple_build_call, gimple_build_call_valist,
201 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
202 components of a GIMPLE_CALL statement to function FN with NARGS
203 arguments. */
204
205 static inline gimple
206 gimple_build_call_1 (tree fn, unsigned nargs)
207 {
208 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
209 if (TREE_CODE (fn) == FUNCTION_DECL)
210 fn = build_fold_addr_expr (fn);
211 gimple_set_op (s, 1, fn);
212 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
213 gimple_call_reset_alias_info (s);
214 return s;
215 }
216
217
218 /* Build a GIMPLE_CALL statement to function FN with the arguments
219 specified in vector ARGS. */
220
221 gimple
222 gimple_build_call_vec (tree fn, vec<tree> args)
223 {
224 unsigned i;
225 unsigned nargs = args.length ();
226 gimple call = gimple_build_call_1 (fn, nargs);
227
228 for (i = 0; i < nargs; i++)
229 gimple_call_set_arg (call, i, args[i]);
230
231 return call;
232 }
233
234
235 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
236 arguments. The ... are the arguments. */
237
238 gimple
239 gimple_build_call (tree fn, unsigned nargs, ...)
240 {
241 va_list ap;
242 gimple call;
243 unsigned i;
244
245 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
246
247 call = gimple_build_call_1 (fn, nargs);
248
249 va_start (ap, nargs);
250 for (i = 0; i < nargs; i++)
251 gimple_call_set_arg (call, i, va_arg (ap, tree));
252 va_end (ap);
253
254 return call;
255 }
256
257
258 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
259 arguments. AP contains the arguments. */
260
261 gimple
262 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
263 {
264 gimple call;
265 unsigned i;
266
267 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
268
269 call = gimple_build_call_1 (fn, nargs);
270
271 for (i = 0; i < nargs; i++)
272 gimple_call_set_arg (call, i, va_arg (ap, tree));
273
274 return call;
275 }
276
277
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  /* Mark the call as internal before storing FN.  NOTE(review): the
     order presumably matters -- gimple_call_set_internal_fn looks like
     it expects GF_CALL_INTERNAL to be set already; confirm before
     reordering.  */
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}
291
292
293 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
294 the number of arguments. The ... are the arguments. */
295
296 gimple
297 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
298 {
299 va_list ap;
300 gimple call;
301 unsigned i;
302
303 call = gimple_build_call_internal_1 (fn, nargs);
304 va_start (ap, nargs);
305 for (i = 0; i < nargs; i++)
306 gimple_call_set_arg (call, i, va_arg (ap, tree));
307 va_end (ap);
308
309 return call;
310 }
311
312
313 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
314 specified in vector ARGS. */
315
316 gimple
317 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
318 {
319 unsigned i, nargs;
320 gimple call;
321
322 nargs = args.length ();
323 call = gimple_build_call_internal_1 (fn, nargs);
324 for (i = 0; i < nargs; i++)
325 gimple_call_set_arg (call, i, args[i]);
326
327 return call;
328 }
329
330
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  /* Prefer the resolved FUNCTION_DECL if one is known; otherwise fall
     back to the (indirect) callee expression.  */
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  /* CALL_ALLOCA_FOR_VAR_P and CALL_FROM_THUNK_P appear to share a tree
     flag bit: the alloca interpretation is used only for the alloca
     builtins, the thunk interpretation otherwise.  NOTE(review): verify
     against tree.h before changing this branch.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}
369
370
/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.

   The RHS tree is decomposed into a subcode and up to three operands,
   then handed to gimple_build_assign_with_ops.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
                                       PASS_MEM_STAT);
}
386
387
388 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
389 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
390 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
391
392 gimple
393 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
394 tree op2, tree op3 MEM_STAT_DECL)
395 {
396 unsigned num_ops;
397 gimple p;
398
399 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
400 code). */
401 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
402
403 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
404 PASS_MEM_STAT);
405 gimple_assign_set_lhs (p, lhs);
406 gimple_assign_set_rhs1 (p, op1);
407 if (op2)
408 {
409 gcc_assert (num_ops > 2);
410 gimple_assign_set_rhs2 (p, op2);
411 }
412
413 if (op3)
414 {
415 gcc_assert (num_ops > 3);
416 gimple_assign_set_rhs3 (p, op3);
417 }
418
419 return p;
420 }
421
/* Overload of the above for binary (or narrower) RHS codes: forwards
   with a NULL third operand.  */

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
                              tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
                                       PASS_MEM_STAT);
}
429
430
431 /* Build a GIMPLE_COND statement.
432
433 PRED is the condition used to compare LHS and the RHS.
434 T_LABEL is the label to jump to if the condition is true.
435 F_LABEL is the label to jump to otherwise. */
436
437 gimple
438 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
439 tree t_label, tree f_label)
440 {
441 gimple p;
442
443 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
444 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
445 gimple_cond_set_lhs (p, lhs);
446 gimple_cond_set_rhs (p, rhs);
447 gimple_cond_set_true_label (p, t_label);
448 gimple_cond_set_false_label (p, f_label);
449 return p;
450 }
451
452 /* Build a GIMPLE_COND statement from the conditional expression tree
453 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
454
455 gimple
456 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
457 {
458 enum tree_code code;
459 tree lhs, rhs;
460
461 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
462 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
463 }
464
465 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
466 boolean expression tree COND. */
467
468 void
469 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
470 {
471 enum tree_code code;
472 tree lhs, rhs;
473
474 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
475 gimple_cond_set_condition (stmt, code, lhs, rhs);
476 }
477
478 /* Build a GIMPLE_LABEL statement for LABEL. */
479
480 gimple
481 gimple_build_label (tree label)
482 {
483 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
484 gimple_label_set_label (p, label);
485 return p;
486 }
487
488 /* Build a GIMPLE_GOTO statement to label DEST. */
489
490 gimple
491 gimple_build_goto (tree dest)
492 {
493 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
494 gimple_goto_set_dest (p, dest);
495 return p;
496 }
497
498
/* Build a GIMPLE_NOP statement.  A nop carries no operands.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
506
507
508 /* Build a GIMPLE_BIND statement.
509 VARS are the variables in BODY.
510 BLOCK is the containing block. */
511
512 gimple
513 gimple_build_bind (tree vars, gimple_seq body, tree block)
514 {
515 gimple p = gimple_alloc (GIMPLE_BIND, 0);
516 gimple_bind_set_vars (p, vars);
517 if (body)
518 gimple_bind_set_body (p, body);
519 if (block)
520 gimple_bind_set_block (p, block);
521 return p;
522 }
523
524 /* Helper function to set the simple fields of a asm stmt.
525
526 STRING is a pointer to a string that is the asm blocks assembly code.
527 NINPUT is the number of register inputs.
528 NOUTPUT is the number of register outputs.
529 NCLOBBERS is the number of clobbered registers.
530 */
531
532 static inline gimple
533 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
534 unsigned nclobbers, unsigned nlabels)
535 {
536 gimple p;
537 int size = strlen (string);
538
539 /* ASMs with labels cannot have outputs. This should have been
540 enforced by the front end. */
541 gcc_assert (nlabels == 0 || noutputs == 0);
542
543 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
544 ninputs + noutputs + nclobbers + nlabels);
545
546 p->gimple_asm.ni = ninputs;
547 p->gimple_asm.no = noutputs;
548 p->gimple_asm.nc = nclobbers;
549 p->gimple_asm.nl = nlabels;
550 p->gimple_asm.string = ggc_alloc_string (string, size);
551
552 if (GATHER_STATISTICS)
553 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
554
555 return p;
556 }
557
558 /* Build a GIMPLE_ASM statement.
559
560 STRING is the assembly code.
561 NINPUT is the number of register inputs.
562 NOUTPUT is the number of register outputs.
563 NCLOBBERS is the number of clobbered registers.
564 INPUTS is a vector of the input register parameters.
565 OUTPUTS is a vector of the output register parameters.
566 CLOBBERS is a vector of the clobbered register parameters.
567 LABELS is a vector of destination labels. */
568
569 gimple
570 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
571 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
572 vec<tree, va_gc> *labels)
573 {
574 gimple p;
575 unsigned i;
576
577 p = gimple_build_asm_1 (string,
578 vec_safe_length (inputs),
579 vec_safe_length (outputs),
580 vec_safe_length (clobbers),
581 vec_safe_length (labels));
582
583 for (i = 0; i < vec_safe_length (inputs); i++)
584 gimple_asm_set_input_op (p, i, (*inputs)[i]);
585
586 for (i = 0; i < vec_safe_length (outputs); i++)
587 gimple_asm_set_output_op (p, i, (*outputs)[i]);
588
589 for (i = 0; i < vec_safe_length (clobbers); i++)
590 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
591
592 for (i = 0; i < vec_safe_length (labels); i++)
593 gimple_asm_set_label_op (p, i, (*labels)[i]);
594
595 return p;
596 }
597
598 /* Build a GIMPLE_CATCH statement.
599
600 TYPES are the catch types.
601 HANDLER is the exception handler. */
602
603 gimple
604 gimple_build_catch (tree types, gimple_seq handler)
605 {
606 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
607 gimple_catch_set_types (p, types);
608 if (handler)
609 gimple_catch_set_handler (p, handler);
610
611 return p;
612 }
613
614 /* Build a GIMPLE_EH_FILTER statement.
615
616 TYPES are the filter's types.
617 FAILURE is the filter's failure action. */
618
619 gimple
620 gimple_build_eh_filter (tree types, gimple_seq failure)
621 {
622 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
623 gimple_eh_filter_set_types (p, types);
624 if (failure)
625 gimple_eh_filter_set_failure (p, failure);
626
627 return p;
628 }
629
630 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
631
632 gimple
633 gimple_build_eh_must_not_throw (tree decl)
634 {
635 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
636
637 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
638 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
639 gimple_eh_must_not_throw_set_fndecl (p, decl);
640
641 return p;
642 }
643
644 /* Build a GIMPLE_EH_ELSE statement. */
645
646 gimple
647 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
648 {
649 gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
650 gimple_eh_else_set_n_body (p, n_body);
651 gimple_eh_else_set_e_body (p, e_body);
652 return p;
653 }
654
655 /* Build a GIMPLE_TRY statement.
656
657 EVAL is the expression to evaluate.
658 CLEANUP is the cleanup expression.
659 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
660 whether this is a try/catch or a try/finally respectively. */
661
662 gimple
663 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
664 enum gimple_try_flags kind)
665 {
666 gimple p;
667
668 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
669 p = gimple_alloc (GIMPLE_TRY, 0);
670 gimple_set_subcode (p, kind);
671 if (eval)
672 gimple_try_set_eval (p, eval);
673 if (cleanup)
674 gimple_try_set_cleanup (p, cleanup);
675
676 return p;
677 }
678
679 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
680
681 CLEANUP is the cleanup expression. */
682
683 gimple
684 gimple_build_wce (gimple_seq cleanup)
685 {
686 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
687 if (cleanup)
688 gimple_wce_set_cleanup (p, cleanup);
689
690 return p;
691 }
692
693
/* Build a GIMPLE_RESX statement.  REGION is the EH region number
   stored directly in the eh_ctrl field.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}
703
704
705 /* The helper for constructing a gimple switch statement.
706 INDEX is the switch's index.
707 NLABELS is the number of labels in the switch excluding the default.
708 DEFAULT_LABEL is the default label for the switch statement. */
709
710 gimple
711 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
712 {
713 /* nlabels + 1 default label + 1 index. */
714 gcc_checking_assert (default_label);
715 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
716 1 + 1 + nlabels);
717 gimple_switch_set_index (p, index);
718 gimple_switch_set_default_label (p, default_label);
719 return p;
720 }
721
722 /* Build a GIMPLE_SWITCH statement.
723
724 INDEX is the switch's index.
725 DEFAULT_LABEL is the default label
726 ARGS is a vector of labels excluding the default. */
727
728 gimple
729 gimple_build_switch (tree index, tree default_label, vec<tree> args)
730 {
731 unsigned i, nlabels = args.length ();
732
733 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
734
735 /* Copy the labels from the vector to the switch statement. */
736 for (i = 0; i < nlabels; i++)
737 gimple_switch_set_label (p, i + 1, args[i]);
738
739 return p;
740 }
741
/* Build a GIMPLE_EH_DISPATCH statement.  REGION is the EH region
   number stored directly in the eh_ctrl field.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}
751
752 /* Build a new GIMPLE_DEBUG_BIND statement.
753
754 VAR is bound to VALUE; block and location are taken from STMT. */
755
756 gimple
757 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
758 {
759 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
760 (unsigned)GIMPLE_DEBUG_BIND, 2
761 PASS_MEM_STAT);
762
763 gimple_debug_bind_set_var (p, var);
764 gimple_debug_bind_set_value (p, value);
765 if (stmt)
766 gimple_set_location (p, gimple_location (stmt));
767
768 return p;
769 }
770
771
772 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
773
774 VAR is bound to VALUE; block and location are taken from STMT. */
775
776 gimple
777 gimple_build_debug_source_bind_stat (tree var, tree value,
778 gimple stmt MEM_STAT_DECL)
779 {
780 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
781 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
782 PASS_MEM_STAT);
783
784 gimple_debug_source_bind_set_var (p, var);
785 gimple_debug_source_bind_set_value (p, value);
786 if (stmt)
787 gimple_set_location (p, gimple_location (stmt));
788
789 return p;
790 }
791
792
793 /* Build a GIMPLE_OMP_CRITICAL statement.
794
795 BODY is the sequence of statements for which only one thread can execute.
796 NAME is optional identifier for this critical block. */
797
798 gimple
799 gimple_build_omp_critical (gimple_seq body, tree name)
800 {
801 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
802 gimple_omp_critical_set_name (p, name);
803 if (body)
804 gimple_omp_set_body (p, body);
805
806 return p;
807 }
808
/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  /* Allocate one iteration-variable record per collapsed loop level,
     stored directly in the tuple.  */
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
835
836
837 /* Build a GIMPLE_OMP_PARALLEL statement.
838
839 BODY is sequence of statements which are executed in parallel.
840 CLAUSES, are the OMP parallel construct's clauses.
841 CHILD_FN is the function created for the parallel threads to execute.
842 DATA_ARG are the shared data argument(s). */
843
844 gimple
845 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
846 tree data_arg)
847 {
848 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
849 if (body)
850 gimple_omp_set_body (p, body);
851 gimple_omp_parallel_set_clauses (p, clauses);
852 gimple_omp_parallel_set_child_fn (p, child_fn);
853 gimple_omp_parallel_set_data_arg (p, data_arg);
854
855 return p;
856 }
857
858
859 /* Build a GIMPLE_OMP_TASK statement.
860
861 BODY is sequence of statements which are executed by the explicit task.
862 CLAUSES, are the OMP parallel construct's clauses.
863 CHILD_FN is the function created for the parallel threads to execute.
864 DATA_ARG are the shared data argument(s).
865 COPY_FN is the optional function for firstprivate initialization.
866 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
867
868 gimple
869 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
870 tree data_arg, tree copy_fn, tree arg_size,
871 tree arg_align)
872 {
873 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
874 if (body)
875 gimple_omp_set_body (p, body);
876 gimple_omp_task_set_clauses (p, clauses);
877 gimple_omp_task_set_child_fn (p, child_fn);
878 gimple_omp_task_set_data_arg (p, data_arg);
879 gimple_omp_task_set_copy_fn (p, copy_fn);
880 gimple_omp_task_set_arg_size (p, arg_size);
881 gimple_omp_task_set_arg_align (p, arg_align);
882
883 return p;
884 }
885
886
887 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
888
889 BODY is the sequence of statements in the section. */
890
891 gimple
892 gimple_build_omp_section (gimple_seq body)
893 {
894 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
895 if (body)
896 gimple_omp_set_body (p, body);
897
898 return p;
899 }
900
901
902 /* Build a GIMPLE_OMP_MASTER statement.
903
904 BODY is the sequence of statements to be executed by just the master. */
905
906 gimple
907 gimple_build_omp_master (gimple_seq body)
908 {
909 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
910 if (body)
911 gimple_omp_set_body (p, body);
912
913 return p;
914 }
915
916
917 /* Build a GIMPLE_OMP_TASKGROUP statement.
918
919 BODY is the sequence of statements to be executed by the taskgroup
920 construct. */
921
922 gimple
923 gimple_build_omp_taskgroup (gimple_seq body)
924 {
925 gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
926 if (body)
927 gimple_omp_set_body (p, body);
928
929 return p;
930 }
931
932
933 /* Build a GIMPLE_OMP_CONTINUE statement.
934
935 CONTROL_DEF is the definition of the control variable.
936 CONTROL_USE is the use of the control variable. */
937
938 gimple
939 gimple_build_omp_continue (tree control_def, tree control_use)
940 {
941 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
942 gimple_omp_continue_set_control_def (p, control_def);
943 gimple_omp_continue_set_control_use (p, control_use);
944 return p;
945 }
946
947 /* Build a GIMPLE_OMP_ORDERED statement.
948
949 BODY is the sequence of statements inside a loop that will executed in
950 sequence. */
951
952 gimple
953 gimple_build_omp_ordered (gimple_seq body)
954 {
955 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
956 if (body)
957 gimple_omp_set_body (p, body);
958
959 return p;
960 }
961
962
963 /* Build a GIMPLE_OMP_RETURN statement.
964 WAIT_P is true if this is a non-waiting return. */
965
966 gimple
967 gimple_build_omp_return (bool wait_p)
968 {
969 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
970 if (wait_p)
971 gimple_omp_return_set_nowait (p);
972
973 return p;
974 }
975
976
977 /* Build a GIMPLE_OMP_SECTIONS statement.
978
979 BODY is a sequence of section statements.
980 CLAUSES are any of the OMP sections contsruct's clauses: private,
981 firstprivate, lastprivate, reduction, and nowait. */
982
983 gimple
984 gimple_build_omp_sections (gimple_seq body, tree clauses)
985 {
986 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
987 if (body)
988 gimple_omp_set_body (p, body);
989 gimple_omp_sections_set_clauses (p, clauses);
990
991 return p;
992 }
993
994
/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  It carries no operands or
   body.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}
1002
1003
1004 /* Build a GIMPLE_OMP_SINGLE statement.
1005
1006 BODY is the sequence of statements that will be executed once.
1007 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1008 copyprivate, nowait. */
1009
1010 gimple
1011 gimple_build_omp_single (gimple_seq body, tree clauses)
1012 {
1013 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1014 if (body)
1015 gimple_omp_set_body (p, body);
1016 gimple_omp_single_set_clauses (p, clauses);
1017
1018 return p;
1019 }
1020
1021
1022 /* Build a GIMPLE_OMP_TARGET statement.
1023
1024 BODY is the sequence of statements that will be executed.
1025 CLAUSES are any of the OMP target construct's clauses. */
1026
1027 gimple
1028 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1029 {
1030 gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
1031 if (body)
1032 gimple_omp_set_body (p, body);
1033 gimple_omp_target_set_clauses (p, clauses);
1034 gimple_omp_target_set_kind (p, kind);
1035
1036 return p;
1037 }
1038
1039
1040 /* Build a GIMPLE_OMP_TEAMS statement.
1041
1042 BODY is the sequence of statements that will be executed.
1043 CLAUSES are any of the OMP teams construct's clauses. */
1044
1045 gimple
1046 gimple_build_omp_teams (gimple_seq body, tree clauses)
1047 {
1048 gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
1049 if (body)
1050 gimple_omp_set_body (p, body);
1051 gimple_omp_teams_set_clauses (p, clauses);
1052
1053 return p;
1054 }
1055
1056
1057 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1058
1059 gimple
1060 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1061 {
1062 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1063 gimple_omp_atomic_load_set_lhs (p, lhs);
1064 gimple_omp_atomic_load_set_rhs (p, rhs);
1065 return p;
1066 }
1067
1068 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1069
1070 VAL is the value we are storing. */
1071
1072 gimple
1073 gimple_build_omp_atomic_store (tree val)
1074 {
1075 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1076 gimple_omp_atomic_store_set_val (p, val);
1077 return p;
1078 }
1079
1080 /* Build a GIMPLE_TRANSACTION statement. */
1081
1082 gimple
1083 gimple_build_transaction (gimple_seq body, tree label)
1084 {
1085 gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
1086 gimple_transaction_set_body (p, body);
1087 gimple_transaction_set_label (p, label);
1088 return p;
1089 }
1090
1091 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1092 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1093
1094 gimple
1095 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1096 {
1097 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1098 /* Ensure all the predictors fit into the lower bits of the subcode. */
1099 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1100 gimple_predict_set_predictor (p, predictor);
1101 gimple_predict_set_outcome (p, outcome);
1102 return p;
1103 }
1104
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  Reports the expected
   code/subcode pair against what GS actually contains, then calls
   internal_error (which does not return).  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  /* A zero subcode has no tree-code name; print it as
		     the empty string.  */
		  gs->gsbase.subcode > 0
		  ? get_tree_code_name ((enum tree_code) gs->gsbase.subcode)
		  : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1123
1124
1125 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1126 *SEQ_P is NULL, a new sequence is allocated. */
1127
1128 void
1129 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1130 {
1131 gimple_stmt_iterator si;
1132 if (gs == NULL)
1133 return;
1134
1135 si = gsi_last (*seq_p);
1136 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1137 }
1138
1139 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1140 *SEQ_P is NULL, a new sequence is allocated. This function is
1141 similar to gimple_seq_add_stmt, but does not scan the operands.
1142 During gimplification, we need to manipulate statement sequences
1143 before the def/use vectors have been constructed. */
1144
1145 void
1146 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
1147 {
1148 gimple_stmt_iterator si;
1149
1150 if (gs == NULL)
1151 return;
1152
1153 si = gsi_last (*seq_p);
1154 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1155 }
1156
1157 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1158 NULL, a new sequence is allocated. */
1159
1160 void
1161 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1162 {
1163 gimple_stmt_iterator si;
1164 if (src == NULL)
1165 return;
1166
1167 si = gsi_last (*dst_p);
1168 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1169 }
1170
1171 /* Determine whether to assign a location to the statement GS. */
1172
1173 static bool
1174 should_carry_location_p (gimple gs)
1175 {
1176 /* Don't emit a line note for a label. We particularly don't want to
1177 emit one for the break label, since it doesn't actually correspond
1178 to the beginning of the loop/switch. */
1179 if (gimple_code (gs) == GIMPLE_LABEL)
1180 return false;
1181
1182 return true;
1183 }
1184
1185 /* Set the location for gimple statement GS to LOCATION. */
1186
1187 static void
1188 annotate_one_with_location (gimple gs, location_t location)
1189 {
1190 if (!gimple_has_location (gs)
1191 && !gimple_do_not_emit_location_p (gs)
1192 && should_carry_location_p (gs))
1193 gimple_set_location (gs, location);
1194 }
1195
1196 /* Set LOCATION for all the statements after iterator GSI in sequence
1197 SEQ. If GSI is pointing to the end of the sequence, start with the
1198 first statement in SEQ. */
1199
1200 void
1201 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1202 location_t location)
1203 {
1204 if (gsi_end_p (gsi))
1205 gsi = gsi_start (seq);
1206 else
1207 gsi_next (&gsi);
1208
1209 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1210 annotate_one_with_location (gsi_stmt (gsi), location);
1211 }
1212
1213 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1214
1215 void
1216 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1217 {
1218 gimple_stmt_iterator i;
1219
1220 if (gimple_seq_empty_p (stmt_p))
1221 return;
1222
1223 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1224 {
1225 gimple gs = gsi_stmt (i);
1226 annotate_one_with_location (gs, location);
1227 }
1228 }
1229
1230 /* Helper function of empty_body_p. Return true if STMT is an empty
1231 statement. */
1232
1233 static bool
1234 empty_stmt_p (gimple stmt)
1235 {
1236 if (gimple_code (stmt) == GIMPLE_NOP)
1237 return true;
1238 if (gimple_code (stmt) == GIMPLE_BIND)
1239 return empty_body_p (gimple_bind_body (stmt));
1240 return false;
1241 }
1242
1243
1244 /* Return true if BODY contains nothing but empty statements. */
1245
1246 bool
1247 empty_body_p (gimple_seq body)
1248 {
1249 gimple_stmt_iterator i;
1250
1251 if (gimple_seq_empty_p (body))
1252 return true;
1253 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1254 if (!empty_stmt_p (gsi_stmt (i))
1255 && !is_gimple_debug (gsi_stmt (i)))
1256 return false;
1257
1258 return true;
1259 }
1260
1261
1262 /* Perform a deep copy of sequence SRC and return the result. */
1263
1264 gimple_seq
1265 gimple_seq_copy (gimple_seq src)
1266 {
1267 gimple_stmt_iterator gsi;
1268 gimple_seq new_seq = NULL;
1269 gimple stmt;
1270
1271 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1272 {
1273 stmt = gimple_copy (gsi_stmt (gsi));
1274 gimple_seq_add_stmt (&new_seq, stmt);
1275 }
1276
1277 return new_seq;
1278 }
1279
1280
1281
1282 /* Return true if calls C1 and C2 are known to go to the same function. */
1283
1284 bool
1285 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1286 {
1287 if (gimple_call_internal_p (c1))
1288 return (gimple_call_internal_p (c2)
1289 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1290 else
1291 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1292 || (gimple_call_fndecl (c1)
1293 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1294 }
1295
/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  /* Prefer the callee decl's flags; fall back to internal-function
     flags, then to the flags implied by the call's function type.  */
  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  /* The nothrow property is recorded on the statement itself.  */
  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}
1317
1318 /* Return the "fn spec" string for call STMT. */
1319
1320 static tree
1321 gimple_call_fnspec (const_gimple stmt)
1322 {
1323 tree type, attr;
1324
1325 type = gimple_call_fntype (stmt);
1326 if (!type)
1327 return NULL_TREE;
1328
1329 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1330 if (!attr)
1331 return NULL_TREE;
1332
1333 return TREE_VALUE (TREE_VALUE (attr));
1334 }
1335
1336 /* Detects argument flags for argument number ARG on call STMT. */
1337
1338 int
1339 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1340 {
1341 tree attr = gimple_call_fnspec (stmt);
1342
1343 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1344 return 0;
1345
1346 switch (TREE_STRING_POINTER (attr)[1 + arg])
1347 {
1348 case 'x':
1349 case 'X':
1350 return EAF_UNUSED;
1351
1352 case 'R':
1353 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1354
1355 case 'r':
1356 return EAF_NOCLOBBER | EAF_NOESCAPE;
1357
1358 case 'W':
1359 return EAF_DIRECT | EAF_NOESCAPE;
1360
1361 case 'w':
1362 return EAF_NOESCAPE;
1363
1364 case '.':
1365 default:
1366 return 0;
1367 }
1368 }
1369
1370 /* Detects return flags for the call STMT. */
1371
1372 int
1373 gimple_call_return_flags (const_gimple stmt)
1374 {
1375 tree attr;
1376
1377 if (gimple_call_flags (stmt) & ECF_MALLOC)
1378 return ERF_NOALIAS;
1379
1380 attr = gimple_call_fnspec (stmt);
1381 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1382 return 0;
1383
1384 switch (TREE_STRING_POINTER (attr)[0])
1385 {
1386 case '1':
1387 case '2':
1388 case '3':
1389 case '4':
1390 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1391
1392 case 'm':
1393 return ERF_NOALIAS;
1394
1395 case '.':
1396 default:
1397 return 0;
1398 }
1399 }
1400
1401
1402 /* Return true if GS is a copy assignment. */
1403
1404 bool
1405 gimple_assign_copy_p (gimple gs)
1406 {
1407 return (gimple_assign_single_p (gs)
1408 && is_gimple_val (gimple_op (gs, 1)));
1409 }
1410
1411
1412 /* Return true if GS is a SSA_NAME copy assignment. */
1413
1414 bool
1415 gimple_assign_ssa_name_copy_p (gimple gs)
1416 {
1417 return (gimple_assign_single_p (gs)
1418 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1419 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1420 }
1421
1422
1423 /* Return true if GS is an assignment with a unary RHS, but the
1424 operator has no effect on the assigned value. The logic is adapted
1425 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1426 instances in which STRIP_NOPS was previously applied to the RHS of
1427 an assignment.
1428
1429 NOTE: In the use cases that led to the creation of this function
1430 and of gimple_assign_single_p, it is typical to test for either
1431 condition and to proceed in the same manner. In each case, the
1432 assigned value is represented by the single RHS operand of the
1433 assignment. I suspect there may be cases where gimple_assign_copy_p,
1434 gimple_assign_single_p, or equivalent logic is used where a similar
1435 treatment of unary NOPs is appropriate. */
1436
1437 bool
1438 gimple_assign_unary_nop_p (gimple gs)
1439 {
1440 return (is_gimple_assign (gs)
1441 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1442 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1443 && gimple_assign_rhs1 (gs) != error_mark_node
1444 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1445 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1446 }
1447
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* The label has no UID yet: assign the next free one and
	     make sure the map is large enough to hold it.  */
	  unsigned old_len = vec_safe_length (label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      /* Grow by roughly 1.5x to amortize repeated insertions.  */
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map, new_len);
	    }
	}

      (*label_to_block_map)[uid] = bb;
    }
}
1479
1480
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Flatten EXPR into a subcode plus up to three operands, then
     install them on the statement.  */
  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
1507
1508
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      /* Copy the fixed fields from the old statement and swap the new
	 one into the iterator's sequence.  */
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  /* Operand slot 0 holds the LHS; RHS operands start at slot 1.  */
  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
1545
1546
1547 /* Return the LHS of a statement that performs an assignment,
1548 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1549 for a call to a function that returns no value, or for a
1550 statement other than an assignment or a call. */
1551
1552 tree
1553 gimple_get_lhs (const_gimple stmt)
1554 {
1555 enum gimple_code code = gimple_code (stmt);
1556
1557 if (code == GIMPLE_ASSIGN)
1558 return gimple_assign_lhs (stmt);
1559 else if (code == GIMPLE_CALL)
1560 return gimple_call_lhs (stmt);
1561 else
1562 return NULL_TREE;
1563 }
1564
1565
1566 /* Set the LHS of a statement that performs an assignment,
1567 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1568
1569 void
1570 gimple_set_lhs (gimple stmt, tree lhs)
1571 {
1572 enum gimple_code code = gimple_code (stmt);
1573
1574 if (code == GIMPLE_ASSIGN)
1575 gimple_assign_set_lhs (stmt, lhs);
1576 else if (code == GIMPLE_CALL)
1577 gimple_call_set_lhs (stmt, lhs);
1578 else
1579 gcc_unreachable ();
1580 }
1581
1582
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_EH_ELSE:
	  /* Copy both the normal (n) and exceptional (e) bodies.  */
	  new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
	  gimple_eh_else_set_n_body (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
	  gimple_eh_else_set_e_body (copy, new_seq);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  /* Allocate a fresh iteration-variable array for the copy; the
	     shallow memcpy above left it sharing STMT's array.  */
	  copy->gimple_omp_for.iter
	    = ggc_alloc_vec_gimple_omp_for_iter
	       (gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_TARGET:
	case GIMPLE_OMP_TEAMS:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  /* Common tail for all OMP statements: copy the body.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
	  gimple_transaction_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
1750
1751
1752 /* Return true if statement S has side-effects. We consider a
1753 statement to have side effects if:
1754
1755 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1756 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1757
1758 bool
1759 gimple_has_side_effects (const_gimple s)
1760 {
1761 if (is_gimple_debug (s))
1762 return false;
1763
1764 /* We don't have to scan the arguments to check for
1765 volatile arguments, though, at present, we still
1766 do a scan to check for TREE_SIDE_EFFECTS. */
1767 if (gimple_has_volatile_ops (s))
1768 return true;
1769
1770 if (gimple_code (s) == GIMPLE_ASM
1771 && gimple_asm_volatile_p (s))
1772 return true;
1773
1774 if (is_gimple_call (s))
1775 {
1776 int flags = gimple_call_flags (s);
1777
1778 /* An infinite loop is considered a side effect. */
1779 if (!(flags & (ECF_CONST | ECF_PURE))
1780 || (flags & ECF_LOOPING_CONST_OR_PURE))
1781 return true;
1782
1783 return false;
1784 }
1785
1786 return false;
1787 }
1788
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* For an assignment, operand 0 is the LHS; skip it unless stores
	 are to be considered as well.  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      /* Only volatile asms are considered trapping.  */
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* Pass the second operand of binary operations to
	 operation_could_trap_p as the divisor.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    default:
      break;
    }

  return false;
}
1837
1838 /* Return true if statement S can trap. */
1839
1840 bool
1841 gimple_could_trap_p (gimple s)
1842 {
1843 return gimple_could_trap_p_1 (s, true, true);
1844 }
1845
1846 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1847
1848 bool
1849 gimple_assign_rhs_could_trap_p (gimple s)
1850 {
1851 gcc_assert (is_gimple_assign (s));
1852 return gimple_could_trap_p_1 (s, true, false);
1853 }
1854
1855
/* Print debugging information for gimple stmts generated.  Writes a
   per-allocation-kind table of tuple counts and byte totals to
   stderr; a no-op message is printed when statistics gathering is
   disabled.  */

void
dump_gimple_statistics (void)
{
  int i, total_tuples = 0, total_bytes = 0;

  if (! GATHER_STATISTICS)
    {
      fprintf (stderr, "No gimple statistics\n");
      return;
    }

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind Stmts Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  /* One row per allocation kind, accumulating grand totals.  */
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
	       gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
}
1883
1884
1885 /* Return the number of operands needed on the RHS of a GIMPLE
1886 assignment for an expression with tree code CODE. */
1887
1888 unsigned
1889 get_gimple_rhs_num_ops (enum tree_code code)
1890 {
1891 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
1892
1893 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
1894 return 1;
1895 else if (rhs_class == GIMPLE_BINARY_RHS)
1896 return 2;
1897 else if (rhs_class == GIMPLE_TERNARY_RHS)
1898 return 3;
1899 else
1900 gcc_unreachable ();
1901 }
1902
/* Map each tree code to the class of GIMPLE RHS it forms (unary,
   binary, ternary, single, or invalid).  The table below is generated
   by expanding DEFTREECODE for every code in all-tree.def and is
   indexed by tree code.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS   		    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR						    \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
1938
/* Recompute TREE_SIDE_EFFECTS for tree node T from its current
   operands.  */

void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Start from the node's own volatility, then OR in any operand
	 side effects.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
1989
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }
  /* For x ^ y use x != y.  */
  else if (TREE_CODE (t) == BIT_XOR_EXPR)
    t = build2 (NE_EXPR, TREE_TYPE (t),
		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));

  /* Only return the result if it really is valid as a COND_EXPR
     condition.  */
  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
2031
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  vec<tree> vargs;
  vargs.create (nargs);
  gimple new_stmt;

  /* Collect the arguments that are kept.  */
  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      vargs.quick_push (gimple_call_arg (stmt, i));

  /* Internal calls are rebuilt from their internal function code,
     other calls from the callee expression.  */
  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
					       vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  /* Carry over the virtual operands.  */
  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  /* Carry over location, call flags and the static chain, and flag
     the copy for SSA operand update.  */
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
2069
2070
2071
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      /* The byte offsets agree; the bit offsets must too.  */
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
2122
2123
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.

   First try to match TYPE's main variant against the frontend-provided
   standard type nodes by identity; failing that, fall back to matching
   by mode and precision, and finally build a nonstandard integer type
   of the same precision.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  /* Strip qualifiers etc. so that the identity checks below can hit.  */
  type1 = TYPE_MAIN_VARIANT (type);

  /* Identity matches against the standard C integer type nodes.  Plain
     'char' maps to signed or unsigned char depending on UNSIGNEDP.  */
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
	   ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  /* __int128 only exists on some targets; guard against a NULL node.  */
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
    return unsignedp
	   ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

  /* The following macros expand into the same kind of identity checks
     for the fixed-point type families (_Fract/_Accum, their saturating
     variants, and the mode-specific nodes).  They are #undef'd below.  */
#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

  /* Second pass: match by mode and precision rather than identity.  */
#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)					    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  /* No standard node matched; synthesize a type of the right precision
     and signedness.  */
  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
2290
2291
2292 /* Return an unsigned type the same as TYPE in other respects. */
2293
2294 tree
2295 gimple_unsigned_type (tree type)
2296 {
2297 return gimple_signed_or_unsigned_type (true, type);
2298 }
2299
2300
2301 /* Return a signed type the same as TYPE in other respects. */
2302
2303 tree
2304 gimple_signed_type (tree type)
2305 {
2306 return gimple_signed_or_unsigned_type (false, type);
2307 }
2308
2309
2310 /* Return the typed-based alias set for T, which may be an expression
2311 or a type. Return -1 if we don't do anything special. */
2312
2313 alias_set_type
2314 gimple_get_alias_set (tree t)
2315 {
2316 tree u;
2317
2318 /* Permit type-punning when accessing a union, provided the access
2319 is directly through the union. For example, this code does not
2320 permit taking the address of a union member and then storing
2321 through it. Even the type-punning allowed here is a GCC
2322 extension, albeit a common and useful one; the C standard says
2323 that such accesses have implementation-defined behavior. */
2324 for (u = t;
2325 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2326 u = TREE_OPERAND (u, 0))
2327 if (TREE_CODE (u) == COMPONENT_REF
2328 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2329 return 0;
2330
2331 /* That's all the expressions we handle specially. */
2332 if (!TYPE_P (t))
2333 return -1;
2334
2335 /* For convenience, follow the C standard when dealing with
2336 character types. Any object may be accessed via an lvalue that
2337 has character type. */
2338 if (t == char_type_node
2339 || t == signed_char_type_node
2340 || t == unsigned_char_type_node)
2341 return 0;
2342
2343 /* Allow aliasing between signed and unsigned variants of the same
2344 type. We treat the signed variant as canonical. */
2345 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2346 {
2347 tree t1 = gimple_signed_type (t);
2348
2349 /* t1 == t can happen for boolean nodes which are always unsigned. */
2350 if (t1 != t)
2351 return get_alias_set (t1);
2352 }
2353
2354 return -1;
2355 }
2356
2357
2358 /* Helper for gimple_ior_addresses_taken_1. */
2359
2360 static bool
2361 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
2362 tree addr, void *data)
2363 {
2364 bitmap addresses_taken = (bitmap)data;
2365 addr = get_base_address (addr);
2366 if (addr
2367 && DECL_P (addr))
2368 {
2369 bitmap_set_bit (addresses_taken, DECL_UID (addr));
2370 return true;
2371 }
2372 return false;
2373 }
2374
2375 /* Set the bit for the uid of all decls that have their address taken
2376 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2377 were any in this stmt. */
2378
2379 bool
2380 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
2381 {
2382 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2383 gimple_ior_addresses_taken_1);
2384 }
2385
2386
2387 /* Return TRUE iff stmt is a call to a built-in function. */
2388
2389 bool
2390 is_gimple_builtin_call (gimple stmt)
2391 {
2392 tree callee;
2393
2394 if (is_gimple_call (stmt)
2395 && (callee = gimple_call_fndecl (stmt))
2396 && is_builtin_fn (callee)
2397 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2398 return true;
2399
2400 return false;
2401 }
2402
2403 /* Return true when STMTs arguments match those of FNDECL. */
2404
2405 static bool
2406 validate_call (gimple stmt, tree fndecl)
2407 {
2408 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2409 unsigned nargs = gimple_call_num_args (stmt);
2410 for (unsigned i = 0; i < nargs; ++i)
2411 {
2412 /* Variadic args follow. */
2413 if (!targs)
2414 return true;
2415 tree arg = gimple_call_arg (stmt, i);
2416 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2417 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
2418 ;
2419 else if (POINTER_TYPE_P (TREE_TYPE (arg))
2420 && POINTER_TYPE_P (TREE_VALUE (targs)))
2421 ;
2422 else if (TREE_CODE (TREE_TYPE (arg))
2423 != TREE_CODE (TREE_VALUE (targs)))
2424 return false;
2425 targs = TREE_CHAIN (targs);
2426 }
2427 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2428 return false;
2429 return true;
2430 }
2431
2432 /* Return true when STMT is builtins call to CLASS. */
2433
2434 bool
2435 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
2436 {
2437 tree fndecl;
2438 if (is_gimple_call (stmt)
2439 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2440 && DECL_BUILT_IN_CLASS (fndecl) == klass)
2441 return validate_call (stmt, fndecl);
2442 return false;
2443 }
2444
2445 /* Return true when STMT is builtins call to CODE of CLASS. */
2446
2447 bool
2448 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
2449 {
2450 tree fndecl;
2451 if (is_gimple_call (stmt)
2452 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2453 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2454 && DECL_FUNCTION_CODE (fndecl) == code)
2455 return validate_call (stmt, fndecl);
2456 return false;
2457 }
2458
2459 /* Return true if STMT clobbers memory. STMT is required to be a
2460 GIMPLE_ASM. */
2461
2462 bool
2463 gimple_asm_clobbers_memory_p (const_gimple stmt)
2464 {
2465 unsigned i;
2466
2467 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2468 {
2469 tree op = gimple_asm_clobber_op (stmt, i);
2470 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2471 return true;
2472 }
2473
2474 return false;
2475 }
2476
2477 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2478
2479 void
2480 dump_decl_set (FILE *file, bitmap set)
2481 {
2482 if (set)
2483 {
2484 bitmap_iterator bi;
2485 unsigned i;
2486
2487 fprintf (file, "{ ");
2488
2489 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2490 {
2491 fprintf (file, "D.%u", i);
2492 fprintf (file, " ");
2493 }
2494
2495 fprintf (file, "}");
2496 }
2497 else
2498 fprintf (file, "NIL");
2499 }
2500
2501 /* Return true when CALL is a call stmt that definitely doesn't
2502 free any memory or makes it unavailable otherwise. */
2503 bool
2504 nonfreeing_call_p (gimple call)
2505 {
2506 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2507 && gimple_call_flags (call) & ECF_LEAF)
2508 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2509 {
2510 /* Just in case these become ECF_LEAF in the future. */
2511 case BUILT_IN_FREE:
2512 case BUILT_IN_TM_FREE:
2513 case BUILT_IN_REALLOC:
2514 case BUILT_IN_STACK_RESTORE:
2515 return false;
2516 default:
2517 return true;
2518 }
2519
2520 return false;
2521 }
2522
2523 /* Callback for walk_stmt_load_store_ops.
2524
2525 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2526 otherwise.
2527
2528 This routine only makes a superficial check for a dereference. Thus
2529 it must only be used if it is safe to return a false negative. */
2530 static bool
2531 check_loadstore (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
2532 {
2533 if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2534 && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
2535 return true;
2536 return false;
2537 }
2538
2539 /* If OP can be inferred to be non-zero after STMT executes, return true. */
2540
2541 bool
2542 infer_nonnull_range (gimple stmt, tree op)
2543 {
2544 /* We can only assume that a pointer dereference will yield
2545 non-NULL if -fdelete-null-pointer-checks is enabled. */
2546 if (!flag_delete_null_pointer_checks
2547 || !POINTER_TYPE_P (TREE_TYPE (op))
2548 || gimple_code (stmt) == GIMPLE_ASM)
2549 return false;
2550
2551 if (walk_stmt_load_store_ops (stmt, (void *)op,
2552 check_loadstore, check_loadstore))
2553 return true;
2554
2555 if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2556 {
2557 tree fntype = gimple_call_fntype (stmt);
2558 tree attrs = TYPE_ATTRIBUTES (fntype);
2559 for (; attrs; attrs = TREE_CHAIN (attrs))
2560 {
2561 attrs = lookup_attribute ("nonnull", attrs);
2562
2563 /* If "nonnull" wasn't specified, we know nothing about
2564 the argument. */
2565 if (attrs == NULL_TREE)
2566 return false;
2567
2568 /* If "nonnull" applies to all the arguments, then ARG
2569 is non-null if it's in the argument list. */
2570 if (TREE_VALUE (attrs) == NULL_TREE)
2571 {
2572 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2573 {
2574 if (operand_equal_p (op, gimple_call_arg (stmt, i), 0)
2575 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i))))
2576 return true;
2577 }
2578 return false;
2579 }
2580
2581 /* Now see if op appears in the nonnull list. */
2582 for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2583 {
2584 int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2585 tree arg = gimple_call_arg (stmt, idx);
2586 if (operand_equal_p (op, arg, 0))
2587 return true;
2588 }
2589 }
2590 }
2591
2592 /* If this function is marked as returning non-null, then we can
2593 infer OP is non-null if it is used in the return statement. */
2594 if (gimple_code (stmt) == GIMPLE_RETURN
2595 && gimple_return_retval (stmt)
2596 && operand_equal_p (gimple_return_retval (stmt), op, 0)
2597 && lookup_attribute ("returns_nonnull",
2598 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2599 return true;
2600
2601 return false;
2602 }
2603
2604 /* Compare two case labels. Because the front end should already have
2605 made sure that case ranges do not overlap, it is enough to only compare
2606 the CASE_LOW values of each case label. */
2607
2608 static int
2609 compare_case_labels (const void *p1, const void *p2)
2610 {
2611 const_tree const case1 = *(const_tree const*)p1;
2612 const_tree const case2 = *(const_tree const*)p2;
2613
2614 /* The 'default' case label always goes first. */
2615 if (!CASE_LOW (case1))
2616 return -1;
2617 else if (!CASE_LOW (case2))
2618 return 1;
2619 else
2620 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2621 }
2622
2623 /* Sort the case labels in LABEL_VEC in place in ascending order. */
2624
2625 void
2626 sort_case_labels (vec<tree> label_vec)
2627 {
2628 label_vec.qsort (compare_case_labels);
2629 }
2630 \f
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  i = 0;
  /* NOTE: these may be non-constant for variably-sized index types;
     the INTEGER_CST checks below only apply range tests when the
     bound is actually a constant.  */
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);

  /* First pass: canonicalize each label, dropping unreachable ones
     and (if DEFAULT_CASEP is non-null) extracting the default.
     labels.length () is re-read each iteration because elements may
     be removed in place.  */
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  /* A label without CASE_LOW is the default case; there can
	     be at most one.  */
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* Number of labels that survived the first pass.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Walk adjacent label pairs and verify each pair is
		 contiguous: low of labels[i-1] + 1 == high bound of
		 labels[i], computed on the two-word (low/high HWI)
		 representation of the constants.  */
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);
		  if ((TREE_INT_CST_LOW (low) + 1
		       != TREE_INT_CST_LOW (high))
		      || (TREE_INT_CST_HIGH (low)
			  + (TREE_INT_CST_LOW (high) == 0)
			  != TREE_INT_CST_HIGH (high)))
		    break;
		}
	      if (i == len)
		{
		  /* Full coverage: reuse the first label's target as
		     the (unreachable) default.  */
		  tree label = CASE_LABEL (labels[0]);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
2805
2806 /* Set the location of all statements in SEQ to LOC. */
2807
2808 void
2809 gimple_seq_set_location (gimple_seq seq, location_t loc)
2810 {
2811 for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2812 gimple_set_location (gsi_stmt (i), loc);
2813 }