/* Internal functions.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "internal-fn.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "expr.h"
27 #include "optabs.h"
28 #include "basic-block.h"
29 #include "tree-ssa-alias.h"
30 #include "internal-fn.h"
31 #include "gimple-expr.h"
32 #include "is-a.h"
33 #include "gimple.h"
34 #include "ubsan.h"
35 #include "target.h"
36 #include "predict.h"
37 #include "stringpool.h"
38 #include "tree-ssanames.h"
39
/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

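/* ANNOTATE calls carry loop annotations and are consumed before RTL
   expansion, so this expander should never be reached.  */
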
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      /* pos_neg tracks what range information tells us about the sign of
         the operand that ends up in op1: 1 = known non-negative,
         2 = known negative, 3 = unknown.  */
      int pos_neg = 3;

      /* Compute the operation.  On the RTL level, the operation is
         always unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments is always non-negative
         or always negative, we can do just one comparison and
         conditional jump instead of 2 at run time (3 are present in the
         emitted code).  If one of the arguments is a CONST_INT, all we
         need is to make sure it is op1, then the first
         emit_cmp_and_jump_insns will be just folded.  Otherwise try
         to use range info if available.  */
      if (CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (TREE_CODE (arg0) == SSA_NAME)
        {
          double_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!arg0_min.is_negative ())
                pos_neg = 1;
              else if (arg0_max.is_negative ())
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          double_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!arg1_min.is_negative ())
                pos_neg = 1;
              else if (arg1_max.is_negative ())
                pos_neg = 2;
            }
        }

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On the RTL level, the negation is
         always unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else
        {
          /* For now we don't instrument this.  See __mulvDI3 in libgcc2.c
             for what could be done.  */
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

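/* Expand MASK_LOAD call STMT.  */
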
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

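/* Expand MASK_STORE call STMT.  */
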
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call. */

static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}