Added fnspec to internal functions.
gcc/internal-fn.c
/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "internal-fn.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Fnspec of each internal function, indexed by function number.  */
const_tree internal_fn_fnspec_array[IFN_LAST + 1];

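/* Initialize internal_fn_fnspec_array from the FNSPEC strings given in
   internal-fn.def.  The strings use the same convention as the "fn spec"
   function attribute: the first character describes the return value and
   each following character one argument, e.g. 'R' marks an argument whose
   pointed-to memory is only read.  */
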
void
init_internal_fns ()
{
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) \
  if (FNSPEC) internal_fn_fnspec_array[IFN_##CODE] = \
    build_string ((int) sizeof (FNSPEC), FNSPEC ? FNSPEC : "");
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  internal_fn_fnspec_array[IFN_LAST] = 0;
}

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

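/* ANNOTATE marks loop annotations (e.g. ivdep) and should have been
   stripped before expansion (see replace_loop_annotate in tree-cfg.c),
   so reaching this point is a bug.  */
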
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add sub/add overflow checking to the statement STMT.
   CODE says whether the operation is +, or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;
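      /* pos_neg describes what is known about the sign of the operand
         that ends up in op1 below: 1 = known nonnegative, 2 = known
         negative, 3 = unknown, in which case both runtime checks are
         emitted (for a constant op1 the first comparison simply folds
         away).  */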

      /* Compute the operation.  On RTL level, the addition is always
         unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments (for MINUS_EXPR only
         the second operand, as subtraction is not commutative) is always
         non-negative or always negative, we can do just one comparison
         and conditional jump at runtime instead of two (three are present
         in the emitted code).  If one of the arguments is a CONST_INT,
         all we need is to make sure it is op1; then the first
         emit_cmp_and_jump_insns will be folded.  Otherwise try
         to use range info if available.  */
      if (code == PLUS_EXPR && CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (code == PLUS_EXPR && TREE_CODE (arg0) == SSA_NAME)
        {
          wide_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 1;
              else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          wide_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 1;
              else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                pos_neg = 2;
            }
        }

      /* If op1 is negative, we have to use a different check.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

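      /* The RTL addition or subtraction above wrapped modulo 2^precision,
         so overflow can be recovered by comparing the result against an
         operand: e.g. for 8-bit signed PLUS_EXPR with nonnegative op1,
         100 + 100 wraps to -56 and the res >= op0 check below fails
         exactly when the signed addition overflowed.  */
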
      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, we have to print the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here.  k <= a must hold.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

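      /* In two's complement only the most negative value overflows on
         negation (e.g. negating -128 in 8 bits yields -128 again), so
         comparing op1 against TYPE_MIN_VALUE suffices.  */
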
      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
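          /* No wider integer mode is available, so synthesize the widening
             multiplication from hmode-sized halves: conceptually compute
             (hi0 * 2^hprec + lo0) * (hi1 * 2^hprec + lo1) piecewise, with
             fast paths for operands already known to be sign extended
             from hmode.  */
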
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          unsigned int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

          res = gen_reg_rtx (mode);

          /* True if op0 resp. op1 are known to be in the range of
             halfstype.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 resp. op1 are known to have all zeros or all ones
             in the upper half of bits, but are not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to be
             nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              wide_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg0_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg0_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op0_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op0_medium_p = true;
                  if (!wi::neg_p (arg0_min, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = 0;
                  else if (wi::neg_p (arg0_max, TYPE_SIGN (TREE_TYPE (arg0))))
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              wide_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  unsigned int mprec0 = wi::min_precision (arg1_min, SIGNED);
                  unsigned int mprec1 = wi::min_precision (arg1_max, SIGNED);
                  if (mprec0 <= hprec && mprec1 <= hprec)
                    op1_small_p = true;
                  else if (mprec0 <= hprec + 1 && mprec1 <= hprec + 1)
                    op1_medium_p = true;
                  if (!wi::neg_p (arg1_min, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = 0;
                  else if (wi::neg_p (arg1_max, TYPE_SIGN (TREE_TYPE (arg1))))
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it together
             with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the
             high part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

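          /* At this point loxhi = (unsigned) lopart * (unsigned) hipart.
             The two conditional adjustments below correct it towards the
             signed product: a negative hipart makes the unsigned product
             too large by lopart << hprec, and a negative lopart adds a
             spurious larger << hprec to the final result (loxhi is
             shifted left by hprec at the end), which is why larger is
             subtracted from loxhi.  */
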
          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = convert_modes (mode, hmode, lopart, 1);
          tem = expand_shift (LSHIFT_EXPR, mode, tem, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the result
             of the operation.  The only cases which don't overflow are
             some cases where both hipart0 and hipart1 are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode, false,
                                   do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

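/* Expand MASK_LOAD call STMT: a load from memory performed only for the
   lanes selected by the vector mask given as the last argument, via the
   target's maskload_optab.  */
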
static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return;
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

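/* Expand MASK_STORE call STMT: a store to memory performed only for the
   lanes selected by the vector mask given as the third argument, via the
   target's maskstore_optab.  */
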
static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}

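/* ABNORMAL_DISPATCHER only models abnormal control flow in the CFG
   (e.g. for nonlocal gotos and setjmp receivers); there is nothing
   to expand.  */
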
static void
expand_ABNORMAL_DISPATCHER (gimple)
{
}

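/* Expand BUILTIN_EXPECT call STMT by simply copying the first argument
   to the lhs, if any; the branch prediction hint it carried has already
   been consumed by this point (see the assert below).  */
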
static void
expand_BUILTIN_EXPECT (gimple stmt)
{
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());

  rtx target;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = const0_rtx;
  rtx val = expand_expr (gimple_call_arg (stmt, 0), target, VOIDmode,
                         EXPAND_NORMAL);
  if (lhs && val != target)
    emit_move_insn (target, val);
}

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to an internal function.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}