/* Internal functions.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "internal-fn.h"
#include "tree.h"
#include "stor-layout.h"
#include "expr.h"
#include "optabs.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "ubsan.h"
#include "target.h"
#include "predict.h"
#include "stringpool.h"
#include "tree-ssanames.h"

/* The names of each internal function, indexed by function number.  */
const char *const internal_fn_name_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) #CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  "<invalid-fn>"
};

/* The ECF_* flags of each internal function, indexed by function number.  */
const int internal_fn_flags_array[] = {
#define DEF_INTERNAL_FN(CODE, FLAGS) FLAGS,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};
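
/* For reference (illustrative): each entry in internal-fn.def has the form

     DEF_INTERNAL_FN (NAME, FLAGS)

   so an entry such as DEF_INTERNAL_FN (LOAD_LANES, ECF_CONST | ECF_LEAF)
   would contribute the string "LOAD_LANES" to the name array and
   ECF_CONST | ECF_LEAF to the flags array; the authoritative flags for
   each function live in internal-fn.def itself.  */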

/* ARRAY_TYPE is an array of vector modes.  Return the associated insn
   for load-lanes-style optab OPTAB.  The insn must exist.  */

static enum insn_code
get_multi_vector_move (tree array_type, convert_optab optab)
{
  enum insn_code icode;
  enum machine_mode imode;
  enum machine_mode vmode;

  gcc_assert (TREE_CODE (array_type) == ARRAY_TYPE);
  imode = TYPE_MODE (array_type);
  vmode = TYPE_MODE (TREE_TYPE (array_type));

  icode = convert_optab_handler (optab, imode, vmode);
  gcc_assert (icode != CODE_FOR_nothing);
  return icode;
}
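
/* For example (illustrative, port-specific): on AArch64 an array of two
   V4SI vectors has the 256-bit opaque mode OImode, and
   convert_optab_handler (vec_load_lanes_optab, OImode, V4SImode) resolves
   to the ld2 pattern, which loads two vectors of de-interleaved elements
   from memory.  */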

/* Expand LOAD_LANES call STMT.  */

static void
expand_LOAD_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, mem;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (lhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  mem = expand_normal (rhs);

  gcc_assert (MEM_P (mem));
  PUT_MODE (mem, TYPE_MODE (type));

  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  expand_insn (get_multi_vector_move (type, vec_load_lanes_optab), 2, ops);
}

/* Expand STORE_LANES call STMT.  */

static void
expand_STORE_LANES (gimple stmt)
{
  struct expand_operand ops[2];
  tree type, lhs, rhs;
  rtx target, reg;

  lhs = gimple_call_lhs (stmt);
  rhs = gimple_call_arg (stmt, 0);
  type = TREE_TYPE (rhs);

  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  reg = expand_normal (rhs);

  gcc_assert (MEM_P (target));
  PUT_MODE (target, TYPE_MODE (type));

  create_fixed_operand (&ops[0], target);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  expand_insn (get_multi_vector_move (type, vec_store_lanes_optab), 2, ops);
}

/* Loop annotations are processed and removed before expansion, so this
   should never be reached.  */

static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in adjust_simduid_builtins.  */

static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* This should get expanded in the sanopt pass.  */

static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Add addition/subtraction overflow checking to the statement STMT.
   CODE says whether the operation is + or -.  */

void
ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();
  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode
    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      rtx sub_check = gen_label_rtx ();
      int pos_neg = 3;

      /* Compute the operation.  On RTL level, the operation is always
         performed unsigned.  */
      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);

      /* If we can prove that one of the arguments is always non-negative
         or always negative, we need only one comparison and conditional
         jump at runtime instead of two (three appear in the emitted
         code).  If one of the arguments is a CONST_INT, all we need is
         to make sure it is op1; then the first emit_cmp_and_jump_insns
         will be folded away.  Otherwise try to use range info if
         available.  */
      if (CONST_INT_P (op0))
        {
          rtx tem = op0;
          op0 = op1;
          op1 = tem;
        }
      else if (CONST_INT_P (op1))
        ;
      else if (TREE_CODE (arg0) == SSA_NAME)
        {
          double_int arg0_min, arg0_max;
          if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
            {
              if (!arg0_min.is_negative ())
                pos_neg = 1;
              else if (arg0_max.is_negative ())
                pos_neg = 2;
            }
          if (pos_neg != 3)
            {
              rtx tem = op0;
              op0 = op1;
              op1 = tem;
            }
        }
      if (pos_neg == 3 && !CONST_INT_P (op1) && TREE_CODE (arg1) == SSA_NAME)
        {
          double_int arg1_min, arg1_max;
          if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
            {
              if (!arg1_min.is_negative ())
                pos_neg = 1;
              else if (arg1_max.is_negative ())
                pos_neg = 2;
            }
        }

      /* If the sign of op1 is not known at compile time, we need a
         different check for the case where op1 turns out to be
         negative at runtime.  */
      if (pos_neg == 3)
        emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
                                 false, sub_check, PROB_EVEN);

      /* Compare the result of the operation with one of the operands.  */
      if (pos_neg & 1)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);

      /* If we get here, overflow occurred: report the error.  */
      if (pos_neg == 3)
        {
          emit_jump (do_error);

          emit_label (sub_check);
        }

      /* We have k = a + b for b < 0 here; k <= a must hold.  Similarly,
         for subtraction with b < 0, k = a - b must satisfy k >= a.  */
      if (pos_neg & 2)
        emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
                                 NULL_RTX, mode, false, done_label,
                                 PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
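
/* Illustrative sketch (not GCC code): for signed int, the generic
   fallback above is equivalent to

     unsigned int ur = (unsigned int) a + (unsigned int) b;
     int res = (int) ur;
     ok = b >= 0 ? res >= a : res <= a;

   because adding a non-negative b can only wrap upwards past INT_MAX
   (making res < a), while adding a negative b can only wrap downwards
   past INT_MIN (making res > a).  For subtraction the comparisons are
   reversed.  */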

/* Add negate overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_neg_check (gimple stmt)
{
  rtx res, op1;
  tree lhs, fn, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (negv3_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[3];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op1, mode);
      create_fixed_operand (&ops[2], do_error);
      if (maybe_expand_insn (icode, 3, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      /* Compute the operation.  On RTL level, the negation is always
         performed unsigned.  */
      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);

      /* Compare the operand with the most negative value.  */
      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
                               done_label, PROB_VERY_LIKELY);
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg1), arg1, NULL_TREE);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
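
/* Illustrative sketch (not GCC code): signed negation overflows only for
   the most negative value, so for int the fallback above amounts to

     int res = (int) -(unsigned int) a;
     ok = a != INT_MIN;

   and the negv pattern, when the target provides one, performs the same
   check in hardware.  */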

/* Add mul overflow checking to the statement STMT.  */

void
ubsan_expand_si_overflow_mul_check (gimple stmt)
{
  rtx res, op0, op1;
  tree lhs, fn, arg0, arg1;
  rtx done_label, do_error, target = NULL_RTX;

  lhs = gimple_call_lhs (stmt);
  arg0 = gimple_call_arg (stmt, 0);
  arg1 = gimple_call_arg (stmt, 1);
  done_label = gen_label_rtx ();
  do_error = gen_label_rtx ();

  do_pending_stack_adjust ();
  op0 = expand_normal (arg0);
  op1 = expand_normal (arg1);

  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);

  enum insn_code icode = optab_handler (mulv4_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[4];
      rtx last = get_last_insn ();

      res = gen_reg_rtx (mode);
      create_output_operand (&ops[0], res, mode);
      create_input_operand (&ops[1], op0, mode);
      create_input_operand (&ops[2], op1, mode);
      create_fixed_operand (&ops[3], do_error);
      if (maybe_expand_insn (icode, 4, ops))
        {
          last = get_last_insn ();
          if (profile_status_for_fn (cfun) != PROFILE_ABSENT
              && JUMP_P (last)
              && any_condjump_p (last)
              && !find_reg_note (last, REG_BR_PROB, 0))
            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
          emit_jump (done_label);
        }
      else
        {
          delete_insns_since (last);
          icode = CODE_FOR_nothing;
        }
    }

  if (icode == CODE_FOR_nothing)
    {
      struct separate_ops ops;
      enum machine_mode hmode
        = mode_for_size (GET_MODE_PRECISION (mode) / 2, MODE_INT, 1);
      ops.op0 = arg0;
      ops.op1 = arg1;
      ops.op2 = NULL_TREE;
      ops.location = gimple_location (stmt);
      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
        {
          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
          ops.code = WIDEN_MULT_EXPR;
          ops.type
            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);

          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
          hipart = gen_lowpart (mode, hipart);
          res = gen_lowpart (mode, res);
          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
                                      GET_MODE_PRECISION (mode) - 1,
                                      NULL_RTX, 0);
          /* RES is low half of the double width result, HIPART
             the high half.  There was overflow if
             HIPART is different from RES < 0 ? -1 : 0.  */
          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
                                   false, done_label, PROB_VERY_LIKELY);
        }
      else if (hmode != BLKmode
               && 2 * GET_MODE_PRECISION (hmode) == GET_MODE_PRECISION (mode))
        {
          rtx large_op0 = gen_label_rtx ();
          rtx small_op0_large_op1 = gen_label_rtx ();
          rtx one_small_one_large = gen_label_rtx ();
          rtx both_ops_large = gen_label_rtx ();
          rtx after_hipart_neg = gen_label_rtx ();
          rtx after_lopart_neg = gen_label_rtx ();
          rtx do_overflow = gen_label_rtx ();
          rtx hipart_different = gen_label_rtx ();

          int hprec = GET_MODE_PRECISION (hmode);
          rtx hipart0 = expand_shift (RSHIFT_EXPR, mode, op0, hprec,
                                      NULL_RTX, 0);
          hipart0 = gen_lowpart (hmode, hipart0);
          rtx lopart0 = gen_lowpart (hmode, op0);
          rtx signbit0 = expand_shift (RSHIFT_EXPR, hmode, lopart0, hprec - 1,
                                       NULL_RTX, 0);
          rtx hipart1 = expand_shift (RSHIFT_EXPR, mode, op1, hprec,
                                      NULL_RTX, 0);
          hipart1 = gen_lowpart (hmode, hipart1);
          rtx lopart1 = gen_lowpart (hmode, op1);
          rtx signbit1 = expand_shift (RSHIFT_EXPR, hmode, lopart1, hprec - 1,
                                       NULL_RTX, 0);

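          /* Worked example (illustrative): for SImode with hprec == 16
             and op0 == 0x12345678, hipart0 == 0x1234, lopart0 == 0x5678
             and signbit0 == 0 (the sign bit of lopart0 broadcast by the
             arithmetic shift).  op0 fits in the half mode iff
             hipart0 == signbit0, i.e. iff the high half is just the
             sign extension of the low half; the comparisons below test
             exactly that.  */
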
          res = gen_reg_rtx (mode);

          /* True if op0 (resp. op1) is known to be in the range of the
             signed half-mode type.  */
          bool op0_small_p = false;
          bool op1_small_p = false;
          /* True if op0 (resp. op1) is known to have all zeros or all
             ones in the upper half of bits, but is not known to be
             op{0,1}_small_p.  */
          bool op0_medium_p = false;
          bool op1_medium_p = false;
          /* -1 if op{0,1} is known to be negative, 0 if it is known to
             be nonnegative, 1 if unknown.  */
          int op0_sign = 1;
          int op1_sign = 1;

          if (TREE_CODE (arg0) == SSA_NAME)
            {
              double_int arg0_min, arg0_max;
              if (get_range_info (arg0, &arg0_min, &arg0_max) == VR_RANGE)
                {
                  if (arg0_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg0_min))
                    op0_small_p = true;
                  else if (arg0_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg0_min))
                    op0_medium_p = true;
                  if (!arg0_min.is_negative ())
                    op0_sign = 0;
                  else if (arg0_max.is_negative ())
                    op0_sign = -1;
                }
            }
          if (TREE_CODE (arg1) == SSA_NAME)
            {
              double_int arg1_min, arg1_max;
              if (get_range_info (arg1, &arg1_min, &arg1_max) == VR_RANGE)
                {
                  if (arg1_max.sle (double_int::max_value (hprec, false))
                      && double_int::min_value (hprec, false).sle (arg1_min))
                    op1_small_p = true;
                  else if (arg1_max.sle (double_int::max_value (hprec, true))
                           && (~double_int::max_value (hprec,
                                                       true)).sle (arg1_min))
                    op1_medium_p = true;
                  if (!arg1_min.is_negative ())
                    op1_sign = 0;
                  else if (arg1_max.is_negative ())
                    op1_sign = -1;
                }
            }

          int smaller_sign = 1;
          int larger_sign = 1;
          if (op0_small_p)
            {
              smaller_sign = op0_sign;
              larger_sign = op1_sign;
            }
          else if (op1_small_p)
            {
              smaller_sign = op1_sign;
              larger_sign = op0_sign;
            }
          else if (op0_sign == op1_sign)
            {
              smaller_sign = op0_sign;
              larger_sign = op0_sign;
            }

          if (!op0_small_p)
            emit_cmp_and_jump_insns (signbit0, hipart0, NE, NULL_RTX, hmode,
                                     false, large_op0, PROB_UNLIKELY);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, small_op0_large_op1,
                                     PROB_UNLIKELY);

          /* If both op0 and op1 are sign extended from hmode to mode,
             the multiplication will never overflow.  We can do just one
             hmode x hmode => mode widening multiplication.  */
          if (GET_CODE (lopart0) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart0) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart0, 0);
            }
          if (GET_CODE (lopart1) == SUBREG)
            {
              SUBREG_PROMOTED_VAR_P (lopart1) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (lopart1, 0);
            }
          tree halfstype = build_nonstandard_integer_type (hprec, 0);
          ops.op0 = make_tree (halfstype, lopart0);
          ops.op1 = make_tree (halfstype, lopart1);
          ops.code = WIDEN_MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          rtx thisres
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, thisres);
          emit_jump (done_label);

          emit_label (small_op0_large_op1);

          /* If op0 is sign extended from hmode to mode, but op1 is not,
             just swap the arguments and handle it as op1 sign extended,
             op0 not.  */
          rtx larger = gen_reg_rtx (mode);
          rtx hipart = gen_reg_rtx (hmode);
          rtx lopart = gen_reg_rtx (hmode);
          emit_move_insn (larger, op1);
          emit_move_insn (hipart, hipart1);
          emit_move_insn (lopart, lopart0);
          emit_jump (one_small_one_large);

          emit_label (large_op0);

          if (!op1_small_p)
            emit_cmp_and_jump_insns (signbit1, hipart1, NE, NULL_RTX, hmode,
                                     false, both_ops_large, PROB_UNLIKELY);

          /* If op1 is sign extended from hmode to mode, but op0 is not,
             prepare larger, hipart and lopart pseudos and handle it
             together with small_op0_large_op1.  */
          emit_move_insn (larger, op0);
          emit_move_insn (hipart, hipart0);
          emit_move_insn (lopart, lopart1);

          emit_label (one_small_one_large);

          /* lopart is the low part of the operand that is sign extended
             to mode, larger is the other operand, hipart is the high
             part of larger and lopart0 and lopart1 are the low parts
             of both operands.
             We perform lopart0 * lopart1 and lopart * hipart widening
             multiplications.  */
          tree halfutype = build_nonstandard_integer_type (hprec, 1);
          ops.op0 = make_tree (halfutype, lopart0);
          ops.op1 = make_tree (halfutype, lopart1);
          rtx lo0xlo1
            = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);

          ops.op0 = make_tree (halfutype, lopart);
          ops.op1 = make_tree (halfutype, hipart);
          rtx loxhi = gen_reg_rtx (mode);
          rtx tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (loxhi, tem);

          /* if (hipart < 0) loxhi -= lopart << (bitsize / 2);  */
          if (larger_sign == 0)
            emit_jump (after_hipart_neg);
          else if (larger_sign != -1)
            emit_cmp_and_jump_insns (hipart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_hipart_neg, PROB_EVEN);

          tem = expand_shift (LSHIFT_EXPR, mode, lopart, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, MINUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_hipart_neg);

          /* if (lopart < 0) loxhi -= larger;  */
          if (smaller_sign == 0)
            emit_jump (after_lopart_neg);
          else if (smaller_sign != -1)
            emit_cmp_and_jump_insns (lopart, const0_rtx, GE, NULL_RTX, hmode,
                                     false, after_lopart_neg, PROB_EVEN);

          tem = expand_simple_binop (mode, MINUS, loxhi, larger, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          emit_label (after_lopart_neg);

          /* loxhi += (uns) lo0xlo1 >> (bitsize / 2);  */
          tem = expand_shift (RSHIFT_EXPR, mode, lo0xlo1, hprec, NULL_RTX, 1);
          tem = expand_simple_binop (mode, PLUS, loxhi, tem, NULL_RTX,
                                     1, OPTAB_DIRECT);
          emit_move_insn (loxhi, tem);

          /* if (loxhi >> (bitsize / 2)
                 == (hmode) loxhi >> (bitsize / 2 - 1))  */
          rtx hipartloxhi = expand_shift (RSHIFT_EXPR, mode, loxhi, hprec,
                                          NULL_RTX, 0);
          hipartloxhi = gen_lowpart (hmode, hipartloxhi);
          rtx lopartloxhi = gen_lowpart (hmode, loxhi);
          rtx signbitloxhi = expand_shift (RSHIFT_EXPR, hmode, lopartloxhi,
                                           hprec - 1, NULL_RTX, 0);

          emit_cmp_and_jump_insns (signbitloxhi, hipartloxhi, NE, NULL_RTX,
                                   hmode, false, do_overflow,
                                   PROB_VERY_UNLIKELY);

          /* res = (loxhi << (bitsize / 2)) | (hmode) lo0xlo1;  */
          rtx loxhishifted = expand_shift (LSHIFT_EXPR, mode, loxhi, hprec,
                                           NULL_RTX, 1);
          tem = convert_modes (mode, hmode, gen_lowpart (hmode, lo0xlo1), 1);

          tem = expand_simple_binop (mode, IOR, loxhishifted, tem, res,
                                     1, OPTAB_DIRECT);
          if (tem != res)
            emit_move_insn (res, tem);
          emit_jump (done_label);

          emit_label (both_ops_large);

          /* If both operands are large (not sign extended from hmode),
             then perform the full multiplication which will be the
             result of the operation.  The only cases which don't
             overflow are some cases where both hipart0 and hipart1
             are 0 or -1.  */
          ops.code = MULT_EXPR;
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);

          if (!op0_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart0, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          if (!op1_medium_p)
            {
              tem = expand_simple_binop (hmode, PLUS, hipart1, const1_rtx,
                                         NULL_RTX, 1, OPTAB_DIRECT);
              emit_cmp_and_jump_insns (tem, const1_rtx, GTU, NULL_RTX, hmode,
                                       true, do_error, PROB_VERY_UNLIKELY);
            }

          /* At this point hipart{0,1} are both in [-1, 0].  If they are
             the same, overflow happened if res is negative; if they are
             different, overflow happened if res is positive.  */
          if (op0_sign != 1 && op1_sign != 1 && op0_sign != op1_sign)
            emit_jump (hipart_different);
          else if (op0_sign == 1 || op1_sign == 1)
            emit_cmp_and_jump_insns (hipart0, hipart1, NE, NULL_RTX, hmode,
                                     true, hipart_different, PROB_EVEN);

          emit_cmp_and_jump_insns (res, const0_rtx, LT, NULL_RTX, mode,
                                   false, do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (hipart_different);

          emit_cmp_and_jump_insns (res, const0_rtx, GE, NULL_RTX, mode,
                                   false, do_error, PROB_VERY_UNLIKELY);
          emit_jump (done_label);

          emit_label (do_overflow);

          /* Overflow, do full multiplication and fallthru into
             do_error.  */
          ops.op0 = make_tree (TREE_TYPE (arg0), op0);
          ops.op1 = make_tree (TREE_TYPE (arg0), op1);
          tem = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_move_insn (res, tem);
        }
      else
        {
          ops.code = MULT_EXPR;
          ops.type = TREE_TYPE (arg0);
          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
          emit_jump (done_label);
        }
    }

  emit_label (do_error);
  /* Expand the ubsan builtin call.  */
  push_temp_slots ();
  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
                                     TREE_TYPE (arg0), arg0, arg1);
  expand_normal (fn);
  pop_temp_slots ();
  do_pending_stack_adjust ();

  /* We're done.  */
  emit_label (done_label);

  if (lhs)
    emit_move_insn (target, res);
}
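
/* Illustrative sketch (not GCC code): when a double-width type exists,
   the check above amounts to (assuming 32-bit int)

     long long w = (long long) a * b;
     int res = (int) w;
     ok = (int) (w >> 32) == (res >> 31);

   i.e. the product fits iff its high half equals the sign extension of
   its low half.  The half-mode path reconstructs the same high half
   from 16x16->32 widening multiplies when no wider multiply is
   available.  */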

/* Expand UBSAN_CHECK_ADD call STMT.  */

static void
expand_UBSAN_CHECK_ADD (gimple stmt)
{
  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_SUB call STMT.  */

static void
expand_UBSAN_CHECK_SUB (gimple stmt)
{
  if (integer_zerop (gimple_call_arg (stmt, 0)))
    ubsan_expand_si_overflow_neg_check (stmt);
  else
    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
}

/* Expand UBSAN_CHECK_MUL call STMT.  */

static void
expand_UBSAN_CHECK_MUL (gimple stmt)
{
  ubsan_expand_si_overflow_mul_check (stmt);
}

/* This should get folded in tree-vectorizer.c.  */

static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Expand MASK_LOAD call STMT.  */

static void
expand_MASK_LOAD (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, target, mask;

  maskt = gimple_call_arg (stmt, 2);
  lhs = gimple_call_lhs (stmt);
  type = TREE_TYPE (lhs);
  rhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (rhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  create_output_operand (&ops[0], target, TYPE_MODE (type));
  create_fixed_operand (&ops[1], mem);
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskload_optab, TYPE_MODE (type)), 3, ops);
}

/* Expand MASK_STORE call STMT.  */

static void
expand_MASK_STORE (gimple stmt)
{
  struct expand_operand ops[3];
  tree type, lhs, rhs, maskt;
  rtx mem, reg, mask;

  maskt = gimple_call_arg (stmt, 2);
  rhs = gimple_call_arg (stmt, 3);
  type = TREE_TYPE (rhs);
  lhs = fold_build2 (MEM_REF, type, gimple_call_arg (stmt, 0),
                     gimple_call_arg (stmt, 1));

  mem = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  gcc_assert (MEM_P (mem));
  mask = expand_normal (maskt);
  reg = expand_normal (rhs);
  create_fixed_operand (&ops[0], mem);
  create_input_operand (&ops[1], reg, TYPE_MODE (type));
  create_input_operand (&ops[2], mask, TYPE_MODE (TREE_TYPE (maskt)));
  expand_insn (optab_handler (maskstore_optab, TYPE_MODE (type)), 3, ops);
}
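
/* Conceptually (illustrative, scalar form) a masked store behaves as

     for (i = 0; i < n; i++)
       if (mask[i])
         ptr[i] = rhs[i];

   and a masked load reads only the lanes whose mask bit is set, without
   faulting on the inactive ones; the maskload/maskstore patterns map to
   instructions such as AVX2's vpmaskmovd.  */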

/* Routines to expand each internal function, indexed by function number.
   Each routine has the prototype:

       expand_<NAME> (gimple stmt)

   where STMT is the statement that performs the call.  */
static void (*const internal_fn_expanders[]) (gimple) = {
#define DEF_INTERNAL_FN(CODE, FLAGS) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
  0
};

/* Expand STMT, which is a call to internal function FN.  */

void
expand_internal_call (gimple stmt)
{
  internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}
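
/* For example (illustrative): an internal call appears in GIMPLE dumps as

     _5 = UBSAN_CHECK_ADD (_2, _3);

   and expand_internal_call dispatches it through the table above to
   expand_UBSAN_CHECK_ADD.  */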